#![allow(unknown_lints, non_local_definitions, unreachable_patterns)]
#![deny(renamed_and_removed_lints)]
#![deny(
anonymous_parameters,
deprecated_in_future,
late_bound_lifetime_arguments,
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
path_statements,
patterns_in_fns_without_body,
rust_2018_idioms,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_extern_crates,
// We intentionally choose not to deny `unused_qualifications`. When items
// are added to the prelude (e.g., `core::mem::size_of`), this has the
// consequence of making some uses trigger this lint on the latest toolchain
// (e.g., `mem::size_of`), but fixing it (e.g. by replacing with `size_of`)
// does not work on older toolchains.
//
// We tested a more complicated fix in #1413, but ultimately decided that,
// since this lint is just a minor style lint, the complexity isn't worth it
// - it's fine to occasionally have unused qualifications slip through,
// especially since these do not affect our user-facing API in any way.
variant_size_differences
)]
#![cfg_attr(
__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS,
deny(fuzzy_provenance_casts, lossy_provenance_casts)
)]
#![deny(
clippy::all,
clippy::alloc_instead_of_core,
clippy::arithmetic_side_effects,
clippy::as_underscore,
clippy::assertions_on_result_states,
clippy::as_conversions,
clippy::correctness,
clippy::dbg_macro,
clippy::decimal_literal_representation,
clippy::double_must_use,
clippy::get_unwrap,
clippy::indexing_slicing,
clippy::missing_inline_in_public_items,
clippy::missing_safety_doc,
clippy::must_use_candidate,
clippy::must_use_unit,
clippy::obfuscated_if_else,
clippy::perf,
clippy::print_stdout,
clippy::return_self_not_must_use,
clippy::std_instead_of_core,
clippy::style,
clippy::suspicious,
clippy::todo,
clippy::undocumented_unsafe_blocks,
clippy::unimplemented,
clippy::unnested_or_patterns,
clippy::unwrap_used,
clippy::use_debug
)]
#![allow(clippy::needless_lifetimes, clippy::type_complexity, clippy::incompatible_msrv)]
#![deny(
rustdoc::bare_urls,
rustdoc::broken_intra_doc_links,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_html_tags,
rustdoc::invalid_rust_codeblocks,
rustdoc::missing_crate_level_docs,
rustdoc::private_intra_doc_links
)]
#![cfg_attr(any(test, kani), allow(
// In tests, you get line numbers and have access to source code, so panic
// messages are less important. You also often unwrap a lot, which would
// make expect'ing instead very verbose.
clippy::unwrap_used,
// In tests, there's no harm to "panic risks" - the worst that can happen is
// that your test will fail, and you'll fix it. By contrast, panic risks in
// production code introduce the possibly of code panicking unexpectedly "in
// the field".
clippy::arithmetic_side_effects,
clippy::indexing_slicing,
))]
#![cfg_attr(not(any(test, kani, feature = "std")), no_std)]
#![cfg_attr(
all(feature = "simd-nightly", any(target_arch = "x86", target_arch = "x86_64")),
expect(stable_features)
)]
#![cfg_attr(
all(feature = "simd-nightly", any(target_arch = "x86", target_arch = "x86_64")),
feature(stdarch_x86_avx512)
)]
#![cfg_attr(
all(feature = "simd-nightly", target_arch = "arm"),
feature(stdarch_arm_dsp, stdarch_arm_neon_intrinsics)
)]
#![cfg_attr(
all(feature = "simd-nightly", any(target_arch = "powerpc", target_arch = "powerpc64")),
feature(stdarch_powerpc)
)]
#![cfg_attr(feature = "float-nightly", feature(f16, f128))]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![cfg_attr(
__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS,
feature(layout_for_ptr, coverage_attribute)
)]
#[cfg(any(feature = "derive", test))]
extern crate self as zerocopy;
#[doc(hidden)]
#[macro_use]
pub mod util;
pub mod byte_slice;
pub mod byteorder;
mod deprecated;
#[doc(hidden)]
pub mod doctests;
pub mod error;
mod impls;
#[doc(hidden)]
pub mod layout;
mod macros;
#[doc(hidden)]
pub mod pointer;
mod r#ref;
mod split_at;
mod wrappers;
use core::{
cell::{Cell, UnsafeCell},
cmp::Ordering,
fmt::{self, Debug, Display, Formatter},
hash::Hasher,
marker::PhantomData,
mem::{self, ManuallyDrop, MaybeUninit as CoreMaybeUninit},
num::{
NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Wrapping,
},
ops::{Deref, DerefMut},
ptr::{self, NonNull},
slice,
};
#[cfg(feature = "std")]
use std::io;
use crate::pointer::invariant::{self, BecauseExclusive};
pub use crate::{
byte_slice::*,
byteorder::*,
error::*,
r#ref::*,
split_at::{Split, SplitAt},
wrappers::*,
};
#[cfg(any(feature = "alloc", test, kani))]
extern crate alloc;
#[cfg(any(feature = "alloc", test))]
use alloc::{boxed::Box, vec::Vec};
#[cfg(any(feature = "alloc", test))]
use core::alloc::Layout;
use util::MetadataOf;
#[doc(hidden)]
pub use crate::layout::*;
#[doc(hidden)]
pub use crate::pointer::{invariant::BecauseImmutable, Maybe, Ptr};
#[allow(unused_imports)]
use crate::util::polyfills::{self, NonNullExt as _, NumExt as _};
// On nightly toolchains, running the test suite without
// `RUSTFLAGS="--cfg __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS"`
// silently skips some tests. Emit a compile-time warning so that this is
// visible rather than silent.
#[rustversion::nightly]
#[cfg(all(test, not(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)))]
const _: () = {
    #[deprecated = "some tests may be skipped due to missing RUSTFLAGS=\"--cfg __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS\""]
    const _WARNING: () = ();
    // Referencing the deprecated const is what actually triggers the
    // warning; `#[warn(deprecated)]` ensures it fires even if deprecation
    // warnings are otherwise allowed.
    #[warn(deprecated)]
    _WARNING
};
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::KnownLayout;
#[allow(unused)]
use {FromZeros as FromZeroes, IntoBytes as AsBytes, Ref as LayoutVerified};
/// Indicates that zerocopy can reason about certain aspects of a type's
/// layout.
///
/// This trait is derivable; see the [derive] documentation for details.
///
/// # Safety
///
/// `unsafe` code in this crate relies on `LAYOUT`, `raw_from_ptr_len`, and
/// `pointer_to_metadata` being implemented correctly. Hand-written impls are
/// prevented via `only_derive_is_allowed_to_implement_this_trait`.
#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::KnownLayout")]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.KnownLayout.html"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(KnownLayout)]` to `{Self}`")
)]
pub unsafe trait KnownLayout {
    // Gate on a hidden, unimplementable method so that only the derive macro
    // can implement this trait.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    /// The type of metadata stored in a pointer to `Self`.
    ///
    /// In the impls in this file, sized types use `()` and slice DSTs (e.g.
    /// `[T]`) use `usize` (the trailing-slice element count).
    type PointerMetadata: PointerMetadata;

    // A layout-compatible analog of `Self` whose bytes are permitted to be
    // uninitialized (e.g. `[MaybeUninit<T>]` for `[T]`).
    #[doc(hidden)]
    type MaybeUninit: ?Sized + KnownLayout<PointerMetadata = Self::PointerMetadata>;

    // The layout of `Self`, as computed by this crate's layout machinery.
    #[doc(hidden)]
    const LAYOUT: DstLayout;

    // Reconstructs a raw pointer to `Self` from an untyped data pointer and
    // pointer metadata.
    #[doc(hidden)]
    fn raw_from_ptr_len(bytes: NonNull<u8>, meta: Self::PointerMetadata) -> NonNull<Self>;

    // Extracts the pointer metadata from a raw pointer to `Self`.
    #[doc(hidden)]
    fn pointer_to_metadata(ptr: *mut Self) -> Self::PointerMetadata;

    // Computes the size of `ptr`'s referent from its metadata alone (the
    // referent is never read), or `None` if the size cannot be computed
    // (e.g. arithmetic overflow).
    #[doc(hidden)]
    #[must_use]
    #[inline(always)]
    fn size_of_val_raw(ptr: NonNull<Self>) -> Option<usize> {
        let meta = Self::pointer_to_metadata(ptr.as_ptr());
        Self::size_for_metadata(meta)
    }

    // Produces a dangling `NonNull<Self>` carrying zero-element metadata.
    #[doc(hidden)]
    #[must_use]
    #[inline(always)]
    fn raw_dangling() -> NonNull<Self> {
        let meta = Self::PointerMetadata::from_elem_count(0);
        Self::raw_from_ptr_len(NonNull::dangling(), meta)
    }

    /// Computes the size of the object with the given pointer metadata, or
    /// `None` if it cannot be computed (e.g. arithmetic overflow).
    #[inline(always)]
    fn size_for_metadata(meta: Self::PointerMetadata) -> Option<usize> {
        meta.size_for_metadata(Self::LAYOUT)
    }
}
/// Returns the layout of `T`'s trailing slice.
///
/// The lookup is routed through an associated `const` on a private helper
/// trait so that it is evaluated at monomorphization time; for a `T` whose
/// `LAYOUT` is `Sized` (which the `PointerMetadata = usize` bound should
/// rule out), the `const_panic!` therefore fires during compilation rather
/// than at runtime.
#[inline(always)]
pub(crate) fn trailing_slice_layout<T>() -> TrailingSliceLayout
where
    T: ?Sized + KnownLayout<PointerMetadata = usize>,
{
    // Helper trait whose only purpose is to host the `const` computation.
    trait TrailingFacts {
        const TRAILING: TrailingSliceLayout;
    }

    impl<T: ?Sized> TrailingFacts for T
    where
        T: KnownLayout<PointerMetadata = usize>,
    {
        const TRAILING: TrailingSliceLayout = match T::LAYOUT.size_info {
            crate::SizeInfo::SliceDst(info) => info,
            crate::SizeInfo::Sized { .. } => const_panic!("unreachable"),
        };
    }

    T::TRAILING
}
/// The metadata associated with a pointer to a [`KnownLayout`] type.
#[doc(hidden)]
pub trait PointerMetadata: Copy + Eq + Debug {
    /// Constructs this metadata from a trailing-slice element count.
    ///
    /// Implementations for sized types (`()`) ignore `elems`.
    fn from_elem_count(elems: usize) -> Self;

    /// Computes the size of the object described by `self` and `layout`, or
    /// `None` if it cannot be computed (mismatched `SizeInfo` variant, or
    /// arithmetic overflow).
    fn size_for_metadata(self, layout: DstLayout) -> Option<usize>;
}
// `()` is the pointer metadata of `Sized` types: it carries no information,
// so the element count is discarded on construction.
impl PointerMetadata for () {
    #[inline]
    #[allow(clippy::unused_unit)]
    fn from_elem_count(_elems: usize) -> () {}

    // A sized layout's size is known statically; `()` metadata cannot
    // describe a slice DST, so no size can be computed for one.
    #[inline]
    fn size_for_metadata(self, layout: DstLayout) -> Option<usize> {
        if let SizeInfo::Sized { size } = layout.size_info {
            Some(size)
        } else {
            None
        }
    }
}
// `usize` is the pointer metadata of slice DSTs: the trailing-slice element
// count.
impl PointerMetadata for usize {
    #[inline]
    fn from_elem_count(elems: usize) -> usize {
        elems
    }

    // Size = offset-of-slice + elem_size * count, rounded up to the
    // layout's alignment. All arithmetic is checked; overflow yields `None`.
    #[inline]
    fn size_for_metadata(self, layout: DstLayout) -> Option<usize> {
        let TrailingSliceLayout { offset, elem_size } = match layout.size_info {
            SizeInfo::SliceDst(info) => info,
            // An element count cannot describe a sized layout.
            SizeInfo::Sized { .. } => return None,
        };
        let unpadded = offset.checked_add(elem_size.checked_mul(self)?)?;
        unpadded.checked_add(util::padding_needed_for(unpadded, layout.align))
    }
}
// SAFETY: `[T]` is a slice DST whose layout is computed by
// `DstLayout::for_slice::<T>()`, and the pointer methods below preserve the
// pointer's address and length metadata. NOTE(review): the precise safety
// contract is stated on `KnownLayout`'s derive/macro machinery; confirm this
// summary against it.
unsafe impl<T> KnownLayout for [T] {
    #[allow(clippy::missing_inline_in_public_items, dead_code)]
    #[cfg_attr(
        all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
        coverage(off)
    )]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized,
    {
    }

    // Slice metadata is the element count.
    type PointerMetadata = usize;

    // `[MaybeUninit<T>]` is the layout-compatible, validity-free analog of
    // `[T]`.
    type MaybeUninit = [CoreMaybeUninit<T>];

    const LAYOUT: DstLayout = DstLayout::for_slice::<T>();

    #[inline(always)]
    fn raw_from_ptr_len(data: NonNull<u8>, elems: usize) -> NonNull<Self> {
        // `slice_from_raw_parts` attaches `elems` as length metadata without
        // touching the address.
        #[allow(unstable_name_collisions)]
        NonNull::slice_from_raw_parts(data.cast::<T>(), elems)
    }

    #[inline(always)]
    fn pointer_to_metadata(ptr: *mut [T]) -> usize {
        // Cast to a slice of ZST elements so the length can be read without
        // asserting anything about `T`'s bit validity or even its size.
        #[allow(clippy::as_conversions)]
        let slc = ptr as *const [()];
        // SAFETY: A `[()]` occupies zero bytes at any length, so the
        // reference covers no memory and only the length metadata is read.
        // NOTE(review): this relies on `ptr` being non-null and aligned (any
        // pointer produced from a `NonNull<[T]>` is); confirm all callers
        // uphold this.
        let slc = unsafe { &*slc };
        slc.len()
    }
}
// `KnownLayout` for the sized primitives: their layouts are fully described
// by their statically-known size and alignment.
#[rustfmt::skip]
impl_known_layout!(
    (),
    u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize, f32, f64,
    bool, char,
    NonZeroU8, NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32,
    NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize
);
// `f16` and `f128` are nightly-only; their impls are gated behind the
// `float-nightly` feature (see the `feature(f16, f128)` attribute above).
#[rustfmt::skip]
#[cfg(feature = "float-nightly")]
impl_known_layout!(
    #[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))]
    f16,
    #[cfg_attr(doc_cfg, doc(cfg(feature = "float-nightly")))]
    f128
);
// `KnownLayout` for common generic containers, raw pointers, references,
// and arrays of any length.
#[rustfmt::skip]
impl_known_layout!(
    T => Option<T>,
    T: ?Sized => PhantomData<T>,
    T => Wrapping<T>,
    T => CoreMaybeUninit<T>,
    T: ?Sized => *const T,
    T: ?Sized => *mut T,
    T: ?Sized => &'_ T,
    T: ?Sized => &'_ mut T,
);
impl_known_layout!(const N: usize, T => [T; N]);
// Each `unsafe_impl_known_layout!` invocation asserts that the type on the
// right has the same layout as the `#[repr(...)]` argument. NOTE(review):
// the exact obligation is defined by the macro in `macros`; confirm these
// claims against it.
const _: () = unsafe {
    // SAFETY: `str` is documented to have the same layout as `[u8]`.
    unsafe_impl_known_layout!(
        #[repr([u8])]
        str
    );
    // SAFETY: `ManuallyDrop<T>` is `#[repr(transparent)]` over `T`, and
    // `UnsafeCell<T>`/`Cell<T>` are documented to have the same memory
    // layout as their inner `T`.
    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] ManuallyDrop<T>);
    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] UnsafeCell<T>);
    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] Cell<T>);
};
const _: () = unsafe {
    // SAFETY: `KnownLayout::MaybeUninit` is required to share its parent
    // type's `PointerMetadata` and layout, so this crate's `MaybeUninit<T>`
    // wrapper has the same representation as `T::MaybeUninit`.
    // NOTE(review): confirm against `MaybeUninit`'s definition.
    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T::MaybeUninit)] MaybeUninit<T>)
};
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::FromZeros;
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::Immutable;
/// Types which do not contain any [`UnsafeCell`]s, i.e. are free of interior
/// mutability.
///
/// This trait is derivable; see the [derive] documentation — and in
/// particular its [analysis][derive-analysis] section — for details.
///
/// # Safety
///
/// `unsafe` code in this crate relies on implementations being correct;
/// only the derive macro may implement this trait.
#[cfg_attr(
    feature = "derive",
    doc = "[derive]: zerocopy_derive::Immutable",
    doc = "[derive-analysis]: zerocopy_derive::Immutable#analysis"
)]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Immutable.html"),
    doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Immutable.html#analysis"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(Immutable)]` to `{Self}`")
)]
pub unsafe trait Immutable {
    // Gate on a hidden method so that only the derive macro can implement
    // this trait.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;
}
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::TryFromBytes;
/// Types whose bit validity can be checked at runtime, permitting fallible
/// conversions from bytes.
///
/// This trait is derivable; see the [derive] documentation for details.
///
/// # Safety
///
/// `unsafe` code in this crate relies on `is_bit_valid` being implemented
/// correctly; only the derive macro may implement this trait.
#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::TryFromBytes")]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.TryFromBytes.html"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(TryFromBytes)]` to `{Self}`")
)]
pub unsafe trait TryFromBytes {
    // Gate on a hidden method so that only the derive macro can implement
    // this trait.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    // Returns whether `candidate`'s referent is a bit-valid instance of
    // `Self`.
    #[doc(hidden)]
    fn is_bit_valid<A: invariant::Reference>(candidate: Maybe<'_, Self, A>) -> bool;

    /// Attempts to interpret all of `source` as a `&Self`.
    ///
    /// Fails if `source` has the wrong size or alignment for `Self`, or if
    /// its bytes are not a bit-valid `Self`.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_bytes(source: &[u8]) -> Result<&Self, TryCastError<&[u8], Self>>
    where
        Self: KnownLayout + Immutable,
    {
        static_assert_dst_is_not_zst!(Self);
        match Ptr::from_ref(source).try_cast_into_no_leftover::<Self, BecauseImmutable>(None) {
            Ok(source) => {
                // The cast checks only size and alignment; the bytes must
                // still be validated before a `&Self` is produced.
                match source.try_into_valid() {
                    Ok(valid) => Ok(valid.as_ref()),
                    Err(e) => {
                        Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into())
                    }
                }
            }
            Err(e) => Err(e.map_src(Ptr::as_ref).into()),
        }
    }

    /// Attempts to interpret a prefix of `source` as a `&Self`, also
    /// returning the remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_prefix(source: &[u8]) -> Result<(&Self, &[u8]), TryCastError<&[u8], Self>>
    where
        Self: KnownLayout + Immutable,
    {
        static_assert_dst_is_not_zst!(Self);
        try_ref_from_prefix_suffix(source, CastType::Prefix, None)
    }

    /// Attempts to interpret a suffix of `source` as a `&Self`, also
    /// returning the preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_suffix(source: &[u8]) -> Result<(&[u8], &Self), TryCastError<&[u8], Self>>
    where
        Self: KnownLayout + Immutable,
    {
        static_assert_dst_is_not_zst!(Self);
        try_ref_from_prefix_suffix(source, CastType::Suffix, None).map(swap)
    }

    /// Attempts to interpret all of `bytes` as a `&mut Self`.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_bytes(bytes: &mut [u8]) -> Result<&mut Self, TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout + IntoBytes,
    {
        static_assert_dst_is_not_zst!(Self);
        match Ptr::from_mut(bytes).try_cast_into_no_leftover::<Self, BecauseExclusive>(None) {
            Ok(source) => {
                // The cast checks only size and alignment; the bytes must
                // still be validated before a `&mut Self` is produced.
                match source.try_into_valid() {
                    Ok(source) => Ok(source.as_mut()),
                    Err(e) => {
                        Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into())
                    }
                }
            }
            Err(e) => Err(e.map_src(Ptr::as_mut).into()),
        }
    }

    /// Attempts to interpret a prefix of `source` as a `&mut Self`, also
    /// returning the remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_prefix(
        source: &mut [u8],
    ) -> Result<(&mut Self, &mut [u8]), TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout + IntoBytes,
    {
        static_assert_dst_is_not_zst!(Self);
        try_mut_from_prefix_suffix(source, CastType::Prefix, None)
    }

    /// Attempts to interpret a suffix of `source` as a `&mut Self`, also
    /// returning the preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_suffix(
        source: &mut [u8],
    ) -> Result<(&mut [u8], &mut Self), TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout + IntoBytes,
    {
        static_assert_dst_is_not_zst!(Self);
        try_mut_from_prefix_suffix(source, CastType::Suffix, None).map(swap)
    }

    /// Attempts to interpret all of `source` as a `&Self` whose trailing
    /// slice has exactly `count` elements.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_bytes_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<&Self, TryCastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        match Ptr::from_ref(source).try_cast_into_no_leftover::<Self, BecauseImmutable>(Some(count))
        {
            Ok(source) => {
                match source.try_into_valid() {
                    Ok(source) => Ok(source.as_ref()),
                    Err(e) => {
                        Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into())
                    }
                }
            }
            Err(e) => Err(e.map_src(Ptr::as_ref).into()),
        }
    }

    /// Like [`TryFromBytes::try_ref_from_prefix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_prefix_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<(&Self, &[u8]), TryCastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        try_ref_from_prefix_suffix(source, CastType::Prefix, Some(count))
    }

    /// Like [`TryFromBytes::try_ref_from_suffix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_ref_from_suffix_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<(&[u8], &Self), TryCastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        try_ref_from_prefix_suffix(source, CastType::Suffix, Some(count)).map(swap)
    }

    /// Attempts to interpret all of `source` as a `&mut Self` whose trailing
    /// slice has exactly `count` elements.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_bytes_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<&mut Self, TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + IntoBytes,
    {
        match Ptr::from_mut(source).try_cast_into_no_leftover::<Self, BecauseExclusive>(Some(count))
        {
            Ok(source) => {
                match source.try_into_valid() {
                    Ok(source) => Ok(source.as_mut()),
                    Err(e) => {
                        Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into())
                    }
                }
            }
            Err(e) => Err(e.map_src(Ptr::as_mut).into()),
        }
    }

    /// Like [`TryFromBytes::try_mut_from_prefix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_prefix_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<(&mut Self, &mut [u8]), TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + IntoBytes,
    {
        try_mut_from_prefix_suffix(source, CastType::Prefix, Some(count))
    }

    /// Like [`TryFromBytes::try_mut_from_suffix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_mut_from_suffix_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<(&mut [u8], &mut Self), TryCastError<&mut [u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + IntoBytes,
    {
        try_mut_from_prefix_suffix(source, CastType::Suffix, Some(count)).map(swap)
    }

    /// Attempts to read a `Self` by value from all of `source`'s bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_read_from_bytes(source: &[u8]) -> Result<Self, TryReadError<&[u8], Self>>
    where
        Self: Sized,
    {
        let candidate = match CoreMaybeUninit::<Self>::read_from_bytes(source) {
            Ok(candidate) => candidate,
            Err(e) => {
                return Err(TryReadError::Size(e.with_dst()));
            }
        };
        // SAFETY: `candidate` was copied from initialized `&[u8]` bytes, so
        // all of its bytes are initialized as `try_read_from` requires.
        unsafe { try_read_from(source, candidate) }
    }

    /// Attempts to read a `Self` by value from a prefix of `source`, also
    /// returning the remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_read_from_prefix(source: &[u8]) -> Result<(Self, &[u8]), TryReadError<&[u8], Self>>
    where
        Self: Sized,
    {
        let (candidate, suffix) = match CoreMaybeUninit::<Self>::read_from_prefix(source) {
            Ok(candidate) => candidate,
            Err(e) => {
                return Err(TryReadError::Size(e.with_dst()));
            }
        };
        // SAFETY: `candidate` was copied from initialized `&[u8]` bytes, so
        // all of its bytes are initialized as `try_read_from` requires.
        unsafe { try_read_from(source, candidate).map(|slf| (slf, suffix)) }
    }

    /// Attempts to read a `Self` by value from a suffix of `source`, also
    /// returning the preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn try_read_from_suffix(source: &[u8]) -> Result<(&[u8], Self), TryReadError<&[u8], Self>>
    where
        Self: Sized,
    {
        let (prefix, candidate) = match CoreMaybeUninit::<Self>::read_from_suffix(source) {
            Ok(candidate) => candidate,
            Err(e) => {
                return Err(TryReadError::Size(e.with_dst()));
            }
        };
        // SAFETY: `candidate` was copied from initialized `&[u8]` bytes, so
        // all of its bytes are initialized as `try_read_from` requires.
        unsafe { try_read_from(source, candidate).map(|slf| (prefix, slf)) }
    }
}
/// Attempts to interpret the prefix or suffix of `source` (per `cast_type`)
/// as a `&T`, returning it together with the remaining bytes.
///
/// If `meta` is `Some(count)`, `count` fixes the trailing-slice element
/// count for unsized `T`.
#[inline(always)]
fn try_ref_from_prefix_suffix<T: TryFromBytes + KnownLayout + Immutable + ?Sized>(
    source: &[u8],
    cast_type: CastType,
    meta: Option<T::PointerMetadata>,
) -> Result<(&T, &[u8]), TryCastError<&[u8], T>> {
    match Ptr::from_ref(source).try_cast_into::<T, BecauseImmutable>(cast_type, meta) {
        Ok((source, prefix_suffix)) => {
            // The cast checks only size and alignment; the bytes must still
            // be validated before a `&T` is produced.
            match source.try_into_valid() {
                Ok(valid) => Ok((valid.as_ref(), prefix_suffix.as_ref())),
                Err(e) => Err(e.map_src(|src| src.as_bytes::<BecauseImmutable>().as_ref()).into()),
            }
        }
        Err(e) => Err(e.map_src(Ptr::as_ref).into()),
    }
}
/// Attempts to interpret the prefix or suffix of `candidate` (per
/// `cast_type`) as a `&mut T`, returning it together with the remaining
/// bytes.
///
/// If `meta` is `Some(count)`, `count` fixes the trailing-slice element
/// count for unsized `T`.
#[inline(always)]
fn try_mut_from_prefix_suffix<T: IntoBytes + TryFromBytes + KnownLayout + ?Sized>(
    candidate: &mut [u8],
    cast_type: CastType,
    meta: Option<T::PointerMetadata>,
) -> Result<(&mut T, &mut [u8]), TryCastError<&mut [u8], T>> {
    match Ptr::from_mut(candidate).try_cast_into::<T, BecauseExclusive>(cast_type, meta) {
        Ok((candidate, prefix_suffix)) => {
            // The cast checks only size and alignment; the bytes must still
            // be validated before a `&mut T` is produced.
            match candidate.try_into_valid() {
                Ok(valid) => Ok((valid.as_mut(), prefix_suffix.as_mut())),
                Err(e) => Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into()),
            }
        }
        Err(e) => Err(e.map_src(Ptr::as_mut).into()),
    }
}
/// Flips the order of a 2-tuple's elements.
///
/// Used to turn `(value, rest)` results into the `(rest, value)` shape that
/// the `*_from_suffix` methods return.
#[inline(always)]
fn swap<T, U>(pair: (T, U)) -> (U, T) {
    let (first, second) = pair;
    (second, first)
}
/// Validates that `candidate` holds a bit-valid `T`, returning the `T` on
/// success or a `TryReadError` (carrying `source`) on failure.
///
/// # Safety
///
/// The caller must ensure that every byte of `candidate` is initialized.
#[inline(always)]
unsafe fn try_read_from<S, T: TryFromBytes>(
    source: S,
    mut candidate: CoreMaybeUninit<T>,
) -> Result<T, TryReadError<S, T>> {
    let c_ptr = Ptr::from_mut(&mut candidate);
    // SAFETY: The caller promises that all of `candidate`'s bytes are
    // initialized.
    let c_ptr = unsafe { c_ptr.assume_validity::<invariant::Initialized>() };
    // The validity check is routed through `Wrapping<T>` rather than `T`
    // itself. NOTE(review): the motivation for the `Wrapping` indirection is
    // not visible in this file; confirm against `Ptr::transmute`'s docs. Its
    // soundness is justified by `_assert_same_size_and_validity` below.
    let c_ptr = c_ptr.transmute();
    if !Wrapping::<T>::is_bit_valid(c_ptr.forget_aligned()) {
        return Err(ValidityError::new(source).into());
    }
    // Statically asserts that `Wrapping<T>` and `T` are mutually
    // transmutable while preserving validity, so the `Wrapping<T>` check
    // above also establishes that `candidate` is a bit-valid `T`.
    fn _assert_same_size_and_validity<T>()
    where
        Wrapping<T>: pointer::TransmuteFrom<T, invariant::Valid, invariant::Valid>,
        T: pointer::TransmuteFrom<Wrapping<T>, invariant::Valid, invariant::Valid>,
    {
    }
    _assert_same_size_and_validity::<T>();
    // SAFETY: `candidate` was checked above to be a bit-valid `Wrapping<T>`,
    // which (per the assertion) implies it is a bit-valid, initialized `T`.
    Ok(unsafe { candidate.assume_init() })
}
/// Types for which a sequence of `0` bytes is a valid instance.
///
/// This trait is derivable; see the [derive] documentation — and in
/// particular its [analysis][derive-analysis] section — for details.
///
/// # Safety
///
/// `unsafe` code in this crate relies on the all-zeros bit pattern being a
/// valid `Self`; only the derive macro may implement this trait.
#[cfg_attr(
    feature = "derive",
    doc = "[derive]: zerocopy_derive::FromZeros",
    doc = "[derive-analysis]: zerocopy_derive::FromZeros#analysis"
)]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromZeros.html"),
    doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromZeros.html#analysis"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(FromZeros)]` to `{Self}`")
)]
pub unsafe trait FromZeros: TryFromBytes {
    // Gate on a hidden method so that only the derive macro can implement
    // this trait.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    /// Overwrites `self` with zero bytes.
    #[inline(always)]
    fn zero(&mut self) {
        let slf: *mut Self = self;
        let len = mem::size_of_val(self);
        // SAFETY: `slf` is derived from `&mut self` and `len` is exactly
        // `size_of_val(self)`, so the write covers precisely `self`'s bytes.
        // `Self: FromZeros` guarantees that all-zeros is a valid `Self`.
        unsafe { ptr::write_bytes(slf.cast::<u8>(), 0, len) };
    }

    /// Creates a zeroed `Self`.
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn new_zeroed() -> Self
    where
        Self: Sized,
    {
        // SAFETY: `Self: FromZeros` guarantees that the all-zeros bit
        // pattern is a valid `Self`.
        unsafe { mem::zeroed() }
    }

    /// Creates a `Box<Self>` whose referent is zeroed.
    ///
    /// # Errors
    ///
    /// Returns `Err(AllocError)` if allocation fails.
    #[must_use = "has no side effects (other than allocation)"]
    #[cfg(any(feature = "alloc", test))]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
    #[inline]
    fn new_box_zeroed() -> Result<Box<Self>, AllocError>
    where
        Self: Sized,
    {
        let layout = Layout::new::<Self>();
        if layout.size() == 0 {
            // SAFETY: `Self` is zero-sized here, so per the `Box` docs a
            // well-aligned dangling pointer is a valid `Box<Self>` referent.
            return Ok(unsafe { Box::from_raw(NonNull::dangling().as_ptr()) });
        }
        // SAFETY: `layout` has non-zero size, as `alloc_zeroed` requires.
        #[allow(clippy::undocumented_unsafe_blocks)]
        let ptr = unsafe { alloc::alloc::alloc_zeroed(layout).cast::<Self>() };
        if ptr.is_null() {
            return Err(AllocError);
        }
        // SAFETY: `ptr` is non-null and was allocated by the global
        // allocator with `Self`'s layout; its referent is zeroed, which is a
        // valid `Self` since `Self: FromZeros`.
        #[allow(clippy::undocumented_unsafe_blocks)]
        Ok(unsafe { Box::from_raw(ptr) })
    }

    /// Creates a `Box<Self>` (for a slice DST with a trailing slice of
    /// `count` elements) whose referent is zeroed.
    ///
    /// # Errors
    ///
    /// Returns `Err(AllocError)` if allocation fails.
    #[must_use = "has no side effects (other than allocation)"]
    #[cfg(feature = "alloc")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
    #[inline]
    fn new_box_zeroed_with_elems(count: usize) -> Result<Box<Self>, AllocError>
    where
        Self: KnownLayout<PointerMetadata = usize>,
    {
        // SAFETY: `alloc_zeroed` produces a zeroed referent, which is a
        // valid `Self` since `Self: FromZeros`. NOTE(review): the full
        // contract of `util::new_box` is defined in `util`; confirm there.
        unsafe { crate::util::new_box(count, alloc::alloc::alloc_zeroed) }
    }

    /// Deprecated: renamed to [`FromZeros::new_box_zeroed_with_elems`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromZeros::new_box_zeroed_with_elems`")]
    #[doc(hidden)]
    #[cfg(feature = "alloc")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
    #[must_use = "has no side effects (other than allocation)"]
    #[inline(always)]
    fn new_box_slice_zeroed(len: usize) -> Result<Box<[Self]>, AllocError>
    where
        Self: Sized,
    {
        <[Self]>::new_box_zeroed_with_elems(len)
    }

    /// Creates a `Vec<Self>` of length `len` with all elements zeroed.
    ///
    /// # Errors
    ///
    /// Returns `Err(AllocError)` if allocation fails.
    #[must_use = "has no side effects (other than allocation)"]
    #[cfg(feature = "alloc")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
    #[inline(always)]
    fn new_vec_zeroed(len: usize) -> Result<Vec<Self>, AllocError>
    where
        Self: Sized,
    {
        <[Self]>::new_box_zeroed_with_elems(len).map(Into::into)
    }

    /// Appends `additional` zeroed elements to `v`.
    ///
    /// # Errors
    ///
    /// Returns `Err(AllocError)` if reserving capacity fails.
    #[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
    #[cfg(feature = "alloc")]
    #[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.57.0", feature = "alloc"))))]
    #[inline(always)]
    fn extend_vec_zeroed(v: &mut Vec<Self>, additional: usize) -> Result<(), AllocError>
    where
        Self: Sized,
    {
        // Inserting at the end is equivalent to extending.
        <Self as FromZeros>::insert_vec_zeroed(v, v.len(), additional)
    }

    /// Inserts `additional` zeroed elements into `v` at index `position`,
    /// shifting the following elements up.
    ///
    /// # Panics
    ///
    /// Panics if `position > v.len()`.
    ///
    /// # Errors
    ///
    /// Returns `Err(AllocError)` if reserving capacity fails.
    #[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
    #[cfg(feature = "alloc")]
    #[cfg_attr(doc_cfg, doc(cfg(all(rust = "1.57.0", feature = "alloc"))))]
    #[inline]
    fn insert_vec_zeroed(
        v: &mut Vec<Self>,
        position: usize,
        additional: usize,
    ) -> Result<(), AllocError>
    where
        Self: Sized,
    {
        assert!(position <= v.len());
        v.try_reserve(additional).map_err(|_| AllocError)?;
        // SAFETY: `try_reserve` guaranteed capacity for `additional` extra
        // elements. `copy_to` shifts the `v.len() - position` tail elements
        // up by `additional` slots (within the reserved capacity),
        // `write_bytes` zeroes the vacated gap (valid elements since
        // `Self: FromZeros`), and `set_len` then covers exactly the
        // initialized prefix + gap + tail.
        unsafe {
            let ptr = v.as_mut_ptr();
            #[allow(clippy::arithmetic_side_effects)]
            ptr.add(position).copy_to(ptr.add(position + additional), v.len() - position);
            ptr.add(position).write_bytes(0, additional);
            #[allow(clippy::arithmetic_side_effects)]
            v.set_len(v.len() + additional);
        }
        Ok(())
    }
}
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::FromBytes;
/// Types for which any initialized byte pattern of the right length is a
/// valid instance.
///
/// Because all byte patterns are valid, conversions from bytes can fail only
/// on size or alignment, never on validity.
///
/// This trait is derivable; see the [derive] documentation — and in
/// particular its [analysis][derive-analysis] section — for details.
///
/// # Safety
///
/// `unsafe` code in this crate relies on every initialized byte sequence of
/// the right length being a valid `Self`; only the derive macro may
/// implement this trait.
#[cfg_attr(
    feature = "derive",
    doc = "[derive]: zerocopy_derive::FromBytes",
    doc = "[derive-analysis]: zerocopy_derive::FromBytes#analysis"
)]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromBytes.html"),
    doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.FromBytes.html#analysis"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(FromBytes)]` to `{Self}`")
)]
pub unsafe trait FromBytes: FromZeros {
    // Gate on a hidden method so that only the derive macro can implement
    // this trait.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    /// Interprets all of `source` as a `&Self` without copying.
    ///
    /// # Errors
    ///
    /// Fails if `source` has the wrong size or alignment for `Self`.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_bytes(source: &[u8]) -> Result<&Self, CastError<&[u8], Self>>
    where
        Self: KnownLayout + Immutable,
    {
        static_assert_dst_is_not_zst!(Self);
        match Ptr::from_ref(source).try_cast_into_no_leftover::<_, BecauseImmutable>(None) {
            // `Self: FromBytes`, so no runtime validity check is needed;
            // `recall_validity` upgrades the pointer's validity invariant.
            Ok(ptr) => Ok(ptr.recall_validity().as_ref()),
            Err(err) => Err(err.map_src(|src| src.as_ref())),
        }
    }

    /// Interprets a prefix of `source` as a `&Self`, also returning the
    /// remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_prefix(source: &[u8]) -> Result<(&Self, &[u8]), CastError<&[u8], Self>>
    where
        Self: KnownLayout + Immutable,
    {
        static_assert_dst_is_not_zst!(Self);
        ref_from_prefix_suffix(source, None, CastType::Prefix)
    }

    /// Interprets a suffix of `source` as a `&Self`, also returning the
    /// preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_suffix(source: &[u8]) -> Result<(&[u8], &Self), CastError<&[u8], Self>>
    where
        Self: Immutable + KnownLayout,
    {
        static_assert_dst_is_not_zst!(Self);
        ref_from_prefix_suffix(source, None, CastType::Suffix).map(swap)
    }

    /// Interprets all of `source` as a `&mut Self` without copying.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_bytes(source: &mut [u8]) -> Result<&mut Self, CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout,
    {
        static_assert_dst_is_not_zst!(Self);
        match Ptr::from_mut(source).try_cast_into_no_leftover::<_, BecauseExclusive>(None) {
            Ok(ptr) => Ok(ptr.recall_validity::<_, (_, (_, _))>().as_mut()),
            Err(err) => Err(err.map_src(|src| src.as_mut())),
        }
    }

    /// Interprets a prefix of `source` as a `&mut Self`, also returning the
    /// remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_prefix(
        source: &mut [u8],
    ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout,
    {
        static_assert_dst_is_not_zst!(Self);
        mut_from_prefix_suffix(source, None, CastType::Prefix)
    }

    /// Interprets a suffix of `source` as a `&mut Self`, also returning the
    /// preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_suffix(
        source: &mut [u8],
    ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout,
    {
        static_assert_dst_is_not_zst!(Self);
        mut_from_prefix_suffix(source, None, CastType::Suffix).map(swap)
    }

    /// Interprets all of `source` as a `&Self` whose trailing slice has
    /// exactly `count` elements.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_bytes_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<&Self, CastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        let source = Ptr::from_ref(source);
        let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count));
        match maybe_slf {
            Ok(slf) => Ok(slf.recall_validity().as_ref()),
            Err(err) => Err(err.map_src(|s| s.as_ref())),
        }
    }

    /// Like [`FromBytes::ref_from_prefix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_prefix_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<(&Self, &[u8]), CastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        ref_from_prefix_suffix(source, Some(count), CastType::Prefix)
    }

    /// Like [`FromBytes::ref_from_suffix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn ref_from_suffix_with_elems(
        source: &[u8],
        count: usize,
    ) -> Result<(&[u8], &Self), CastError<&[u8], Self>>
    where
        Self: KnownLayout<PointerMetadata = usize> + Immutable,
    {
        ref_from_prefix_suffix(source, Some(count), CastType::Suffix).map(swap)
    }

    /// Interprets all of `source` as a `&mut Self` whose trailing slice has
    /// exactly `count` elements.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_bytes_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<&mut Self, CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout<PointerMetadata = usize> + Immutable,
    {
        let source = Ptr::from_mut(source);
        let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count));
        match maybe_slf {
            Ok(slf) => Ok(slf
                .recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>()
                .as_mut()),
            Err(err) => Err(err.map_src(|s| s.as_mut())),
        }
    }

    /// Like [`FromBytes::mut_from_prefix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_prefix_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout<PointerMetadata = usize>,
    {
        mut_from_prefix_suffix(source, Some(count), CastType::Prefix)
    }

    /// Like [`FromBytes::mut_from_suffix`], but with an explicit
    /// trailing-slice element count.
    #[must_use = "has no side effects"]
    #[inline]
    fn mut_from_suffix_with_elems(
        source: &mut [u8],
        count: usize,
    ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>>
    where
        Self: IntoBytes + KnownLayout<PointerMetadata = usize>,
    {
        mut_from_prefix_suffix(source, Some(count), CastType::Suffix).map(swap)
    }

    /// Reads a copy of `Self` from all of `source`'s bytes.
    ///
    /// # Errors
    ///
    /// Fails if `source.len() != size_of::<Self>()`. Alignment is irrelevant
    /// since the bytes are copied.
    #[must_use = "has no side effects"]
    #[inline]
    fn read_from_bytes(source: &[u8]) -> Result<Self, SizeError<&[u8], Self>>
    where
        Self: Sized,
    {
        match Ref::<_, Unalign<Self>>::sized_from(source) {
            Ok(r) => Ok(Ref::read(&r).into_inner()),
            Err(CastError::Size(e)) => Err(e.with_dst()),
            // SAFETY: `Unalign<Self>` has alignment 1, so a cast into it
            // cannot fail due to misalignment.
            Err(CastError::Alignment(_)) => {
                unsafe { core::hint::unreachable_unchecked() }
            }
            // The validity error type is uninhabited for `FromBytes` types,
            // as the empty match proves.
            Err(CastError::Validity(i)) => match i {},
        }
    }

    /// Reads a copy of `Self` from a prefix of `source`, also returning the
    /// remaining bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn read_from_prefix(source: &[u8]) -> Result<(Self, &[u8]), SizeError<&[u8], Self>>
    where
        Self: Sized,
    {
        match Ref::<_, Unalign<Self>>::sized_from_prefix(source) {
            Ok((r, suffix)) => Ok((Ref::read(&r).into_inner(), suffix)),
            Err(CastError::Size(e)) => Err(e.with_dst()),
            // SAFETY: `Unalign<Self>` has alignment 1, so a cast into it
            // cannot fail due to misalignment.
            Err(CastError::Alignment(_)) => {
                unsafe { core::hint::unreachable_unchecked() }
            }
            Err(CastError::Validity(i)) => match i {},
        }
    }

    /// Reads a copy of `Self` from a suffix of `source`, also returning the
    /// preceding bytes.
    #[must_use = "has no side effects"]
    #[inline]
    fn read_from_suffix(source: &[u8]) -> Result<(&[u8], Self), SizeError<&[u8], Self>>
    where
        Self: Sized,
    {
        match Ref::<_, Unalign<Self>>::sized_from_suffix(source) {
            Ok((prefix, r)) => Ok((prefix, Ref::read(&r).into_inner())),
            Err(CastError::Size(e)) => Err(e.with_dst()),
            // SAFETY: `Unalign<Self>` has alignment 1, so a cast into it
            // cannot fail due to misalignment.
            Err(CastError::Alignment(_)) => {
                unsafe { core::hint::unreachable_unchecked() }
            }
            Err(CastError::Validity(i)) => match i {},
        }
    }

    /// Reads exactly `size_of::<Self>()` bytes from `src` and returns them
    /// as a `Self`.
    ///
    /// # Errors
    ///
    /// Propagates any error from `src.read_exact`.
    #[cfg(feature = "std")]
    #[inline(always)]
    fn read_from_io<R>(mut src: R) -> io::Result<Self>
    where
        Self: Sized,
        R: io::Read,
    {
        let mut buf = CoreMaybeUninit::<Self>::uninit();
        // Zero the buffer so that every byte is initialized before it is
        // exposed as a byte slice.
        buf.zero();
        let ptr = Ptr::from_mut(&mut buf);
        // SAFETY: `buf.zero()` above wrote zeroes over all of `buf`'s bytes,
        // so its referent is fully initialized.
        let ptr = unsafe { ptr.assume_validity::<invariant::Initialized>() };
        let ptr = ptr.as_bytes::<BecauseExclusive>();
        src.read_exact(ptr.as_mut())?;
        // SAFETY: `buf` was fully initialized by `zero` (and possibly
        // overwritten by `read_exact`), and `Self: FromBytes` means any
        // initialized byte pattern is a valid `Self`.
        Ok(unsafe { buf.assume_init() })
    }

    /// Deprecated: renamed to [`FromBytes::ref_from_bytes`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_bytes`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn ref_from(source: &[u8]) -> Option<&Self>
    where
        Self: KnownLayout + Immutable,
    {
        Self::ref_from_bytes(source).ok()
    }

    /// Deprecated: renamed to [`FromBytes::mut_from_bytes`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_bytes`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn mut_from(source: &mut [u8]) -> Option<&mut Self>
    where
        Self: KnownLayout + IntoBytes,
    {
        Self::mut_from_bytes(source).ok()
    }

    /// Deprecated: renamed to [`FromBytes::ref_from_prefix_with_elems`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_prefix_with_elems`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn slice_from_prefix(source: &[u8], count: usize) -> Option<(&[Self], &[u8])>
    where
        Self: Sized + Immutable,
    {
        <[Self]>::ref_from_prefix_with_elems(source, count).ok()
    }

    /// Deprecated: renamed to [`FromBytes::ref_from_suffix_with_elems`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::ref_from_suffix_with_elems`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn slice_from_suffix(source: &[u8], count: usize) -> Option<(&[u8], &[Self])>
    where
        Self: Sized + Immutable,
    {
        <[Self]>::ref_from_suffix_with_elems(source, count).ok()
    }

    /// Deprecated: renamed to [`FromBytes::mut_from_prefix_with_elems`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_prefix_with_elems`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn mut_slice_from_prefix(source: &mut [u8], count: usize) -> Option<(&mut [Self], &mut [u8])>
    where
        Self: Sized + IntoBytes,
    {
        <[Self]>::mut_from_prefix_with_elems(source, count).ok()
    }

    /// Deprecated: renamed to [`FromBytes::mut_from_suffix_with_elems`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::mut_from_suffix_with_elems`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn mut_slice_from_suffix(source: &mut [u8], count: usize) -> Option<(&mut [u8], &mut [Self])>
    where
        Self: Sized + IntoBytes,
    {
        <[Self]>::mut_from_suffix_with_elems(source, count).ok()
    }

    /// Deprecated: renamed to [`FromBytes::read_from_bytes`].
    #[deprecated(since = "0.8.0", note = "renamed to `FromBytes::read_from_bytes`")]
    #[doc(hidden)]
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn read_from(source: &[u8]) -> Option<Self>
    where
        Self: Sized,
    {
        Self::read_from_bytes(source).ok()
    }
}
/// Interprets the prefix or suffix of `source` (per `cast_type`) as a `&T`,
/// returning it together with the remaining bytes.
///
/// If `meta` is `Some(count)`, `count` fixes the trailing-slice element
/// count for unsized `T`. No validity check is needed since `T: FromBytes`.
#[inline(always)]
fn ref_from_prefix_suffix<T: FromBytes + KnownLayout + Immutable + ?Sized>(
    source: &[u8],
    meta: Option<T::PointerMetadata>,
    cast_type: CastType,
) -> Result<(&T, &[u8]), CastError<&[u8], T>> {
    let (slf, prefix_suffix) = Ptr::from_ref(source)
        .try_cast_into::<_, BecauseImmutable>(cast_type, meta)
        .map_err(|err| err.map_src(|s| s.as_ref()))?;
    Ok((slf.recall_validity().as_ref(), prefix_suffix.as_ref()))
}
/// Shared implementation of the `FromBytes` prefix/suffix mutable-reference
/// conversions: casts either the leading or the trailing bytes of `source`
/// (selected by `cast_type`) to `&mut T`, returning the cast reference
/// together with the remaining uncast bytes.
///
/// Requires `T: IntoBytes` so that arbitrary writes through the returned
/// `&mut T` cannot leave the underlying `[u8]` uninitialized or invalid.
#[inline(always)]
fn mut_from_prefix_suffix<T: FromBytes + IntoBytes + KnownLayout + ?Sized>(
source: &mut [u8],
meta: Option<T::PointerMetadata>,
cast_type: CastType,
) -> Result<(&mut T, &mut [u8]), CastError<&mut [u8], T>> {
// Perform the size- and alignment-checked cast on an exclusive pointer; on
// failure, map the error's source back to the plain byte slice.
let (slf, prefix_suffix) = Ptr::from_mut(source)
.try_cast_into::<_, BecauseExclusive>(cast_type, meta)
.map_err(|err| err.map_src(|s| s.as_mut()))?;
// The turbofish on `recall_validity` guides the trait solver's proof search
// (NOTE(review): shape of the hint mirrors other call sites — confirm
// against `recall_validity`'s documentation).
Ok((slf.recall_validity::<_, (_, (_, _))>().as_mut(), prefix_suffix.as_mut()))
}
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::IntoBytes;
#[cfg_attr(
feature = "derive",
doc = "[derive]: zerocopy_derive::IntoBytes",
doc = "[derive-analysis]: zerocopy_derive::IntoBytes#analysis"
)]
#[cfg_attr(
not(feature = "derive"),
doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.IntoBytes.html"),
doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.IntoBytes.html#analysis"),
)]
#[cfg_attr(
zerocopy_diagnostic_on_unimplemented_1_78_0,
diagnostic::on_unimplemented(note = "Consider adding `#[derive(IntoBytes)]` to `{Self}`")
)]
/// Types that can be viewed as an immutable or mutable slice of initialized
/// bytes.
///
/// # Safety
///
/// Implementations must guarantee that every byte of `Self` is initialized —
/// in particular, that `Self` contains no padding. Do not implement this
/// trait by hand; use `#[derive(IntoBytes)]` (see the attribute on this trait
/// and the hidden method below, which exists to prevent hand-written impls
/// from compiling).
pub unsafe trait IntoBytes {
    // The `Self: Sized` bound keeps the trait object safe while still making
    // it impossible to implement this method (and thus the trait) outside of
    // the custom derive.
    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    /// Gets the bytes of this value as an immutable slice.
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn as_bytes(&self) -> &[u8]
    where
        Self: Immutable,
    {
        let len = mem::size_of_val(self);
        let slf: *const Self = self;
        // SAFETY:
        // - `slf` is derived from `self`, so it is valid for reads of `len`
        //   bytes for the lifetime of the returned slice, and `len` is the
        //   size of an existing allocation, so `len <= isize::MAX`.
        // - `Self: IntoBytes` guarantees that all `len` bytes are
        //   initialized.
        // - `Self: Immutable` guarantees no interior mutability, so the
        //   bytes cannot be mutated through another alias while the shared
        //   slice is live.
        unsafe { slice::from_raw_parts(slf.cast::<u8>(), len) }
    }

    /// Gets the bytes of this value as a mutable slice.
    #[must_use = "has no side effects"]
    #[inline(always)]
    fn as_mut_bytes(&mut self) -> &mut [u8]
    where
        Self: FromBytes,
    {
        let len = mem::size_of_val(self);
        let slf: *mut Self = self;
        // SAFETY:
        // - `slf` is derived from the exclusive borrow `self`, so it is
        //   valid for reads and writes of `len` bytes for the lifetime of
        //   the returned slice, and no other alias can observe it meanwhile.
        // - `Self: IntoBytes` guarantees that all `len` bytes are
        //   initialized, and `Self: FromBytes` guarantees that any bytes
        //   written through the returned slice leave `self` a valid `Self`.
        unsafe { slice::from_raw_parts_mut(slf.cast::<u8>(), len) }
    }

    /// Writes a copy of `self` to `dst`, failing with [`SizeError`] unless
    /// `dst.len() == size_of_val(self)`.
    #[must_use = "callers should check the return value to see if the operation succeeded"]
    #[inline]
    fn write_to(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
    where
        Self: Immutable,
    {
        let src = self.as_bytes();
        if dst.len() == src.len() {
            // SAFETY: we just checked that `dst.len() == src.len()`, which
            // is what `util::copy_unchecked` requires.
            unsafe { util::copy_unchecked(src, dst) }
            Ok(())
        } else {
            Err(SizeError::new(self))
        }
    }

    /// Writes a copy of `self` to the prefix of `dst`, failing with
    /// [`SizeError`] if `dst` is too short; any trailing bytes of `dst` are
    /// left unmodified.
    #[must_use = "callers should check the return value to see if the operation succeeded"]
    #[inline]
    fn write_to_prefix(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
    where
        Self: Immutable,
    {
        let src = self.as_bytes();
        match dst.get_mut(..src.len()) {
            Some(dst) => {
                // SAFETY: `dst` is `dst[..src.len()]`, so
                // `dst.len() == src.len()` as `util::copy_unchecked`
                // requires.
                unsafe { util::copy_unchecked(src, dst) }
                Ok(())
            }
            None => Err(SizeError::new(self)),
        }
    }

    /// Writes a copy of `self` to the suffix of `dst`, failing with
    /// [`SizeError`] if `dst` is too short; any leading bytes of `dst` are
    /// left unmodified.
    #[must_use = "callers should check the return value to see if the operation succeeded"]
    #[inline]
    fn write_to_suffix(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
    where
        Self: Immutable,
    {
        let src = self.as_bytes();
        let start = if let Some(start) = dst.len().checked_sub(src.len()) {
            start
        } else {
            return Err(SizeError::new(self));
        };
        let dst = if let Some(dst) = dst.get_mut(start..) {
            dst
        } else {
            // Unreachable in practice (`start <= dst.len()` by construction),
            // but kept as a defensive error return rather than a panic path.
            return Err(SizeError::new(self));
        };
        // SAFETY: `dst` is `dst[start..]` with `start = dst.len() -
        // src.len()`, so `dst.len() == src.len()` as `util::copy_unchecked`
        // requires.
        unsafe {
            util::copy_unchecked(src, dst);
        }
        Ok(())
    }

    /// Writes a copy of `self` to `dst`, forwarding any I/O error from
    /// [`io::Write::write_all`].
    #[cfg(feature = "std")]
    #[inline(always)]
    fn write_to_io<W>(&self, mut dst: W) -> io::Result<()>
    where
        Self: Immutable,
        W: io::Write,
    {
        dst.write_all(self.as_bytes())
    }

    /// Deprecated 0.8.0 alias for [`IntoBytes::as_mut_bytes`].
    #[deprecated(since = "0.8.0", note = "`IntoBytes::as_bytes_mut` was renamed to `as_mut_bytes`")]
    #[doc(hidden)]
    #[inline]
    fn as_bytes_mut(&mut self) -> &mut [u8]
    where
        Self: FromBytes,
    {
        self.as_mut_bytes()
    }
}
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::Unaligned;
#[cfg_attr(
feature = "derive",
doc = "[derive]: zerocopy_derive::Unaligned",
doc = "[derive-analysis]: zerocopy_derive::Unaligned#analysis"
)]
#[cfg_attr(
not(feature = "derive"),
doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Unaligned.html"),
doc = concat!("[derive-analysis]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.Unaligned.html#analysis"),
)]
#[cfg_attr(
zerocopy_diagnostic_on_unimplemented_1_78_0,
diagnostic::on_unimplemented(note = "Consider adding `#[derive(Unaligned)]` to `{Self}`")
)]
/// Marker trait for types with no alignment requirement (per the derive
/// referenced by the attributes above — alignment 1, so references may point
/// at any byte offset).
///
/// Implemented via `#[derive(Unaligned)]`; the hidden method below prevents
/// hand-written impls from compiling outside the derive.
pub unsafe trait Unaligned {
#[doc(hidden)]
fn only_derive_is_allowed_to_implement_this_trait()
where
Self: Sized;
}
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::ByteEq;
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::ByteHash;
#[cfg(any(feature = "derive", test))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use zerocopy_derive::SplitAt;
// Deprecated 0.8.0 shims for the free-function `Vec` helpers that moved onto
// the `FromZeros` trait; kept (hidden) for backwards compatibility and
// re-exported at the crate root below.
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
mod alloc_support {
use super::*;
/// Deprecated alias for [`FromZeros::extend_vec_zeroed`]: appends
/// `additional` zeroed `T`s to `v`, reporting allocation failure.
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[doc(hidden)]
#[deprecated(since = "0.8.0", note = "moved to `FromZeros`")]
#[inline(always)]
pub fn extend_vec_zeroed<T: FromZeros>(
v: &mut Vec<T>,
additional: usize,
) -> Result<(), AllocError> {
<T as FromZeros>::extend_vec_zeroed(v, additional)
}
/// Deprecated alias for [`FromZeros::insert_vec_zeroed`]: inserts
/// `additional` zeroed `T`s into `v` at index `position`, reporting
/// allocation failure.
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[doc(hidden)]
#[deprecated(since = "0.8.0", note = "moved to `FromZeros`")]
#[inline(always)]
pub fn insert_vec_zeroed<T: FromZeros>(
v: &mut Vec<T>,
position: usize,
additional: usize,
) -> Result<(), AllocError> {
<T as FromZeros>::insert_vec_zeroed(v, position, additional)
}
}
#[cfg(feature = "alloc")]
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[doc(hidden)]
pub use alloc_support::*;
#[cfg(test)]
#[allow(clippy::assertions_on_result_states, clippy::unreadable_literal)]
mod tests {
use static_assertions::assert_impl_all;
use super::*;
use crate::util::testutil::*;
// Minimal unsized test type: a transparent wrapper around `[u8]`, used to
// exercise the byte-conversion traits on a DST.
#[derive(Debug, Eq, PartialEq, FromBytes, IntoBytes, Unaligned, Immutable)]
#[repr(transparent)]
struct Unsized([u8]);
impl Unsized {
fn from_mut_slice(slc: &mut [u8]) -> &mut Unsized {
// SAFETY: `Unsized` is `#[repr(transparent)]` over `[u8]`, so
// `&mut [u8]` and `&mut Unsized` have identical layouts (including
// fat-pointer metadata), making this transmute sound.
unsafe { mem::transmute(slc) }
}
}
// Checks the hand-written `KnownLayout::LAYOUT`s of primitive, sized, and
// slice-DST types, including the `ManuallyDrop`/`PhantomData` forwarding
// impls.
#[test]
fn test_known_layout() {
macro_rules! test {
($ty:ty, $expect:expr) => {
let expect = $expect;
assert_eq!(<$ty as KnownLayout>::LAYOUT, expect);
assert_eq!(<ManuallyDrop<$ty> as KnownLayout>::LAYOUT, expect);
// `PhantomData<T>` is a ZST regardless of `T`, so it shares
// `()`'s layout.
assert_eq!(<PhantomData<$ty> as KnownLayout>::LAYOUT, <() as KnownLayout>::LAYOUT);
};
}
// Helper to build the expected `DstLayout`; `trailing_slice_elem_size` of
// `None` means a sized type, `Some(n)` a slice DST with element size `n`.
let layout =
|offset, align, trailing_slice_elem_size, statically_shallow_unpadded| DstLayout {
align: NonZeroUsize::new(align).unwrap(),
size_info: match trailing_slice_elem_size {
None => SizeInfo::Sized { size: offset },
Some(elem_size) => {
SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size })
}
},
statically_shallow_unpadded,
};
test!((), layout(0, 1, None, false));
test!(u8, layout(1, 1, None, false));
test!(u64, layout(8, mem::align_of::<u64>(), None, false));
test!(AU64, layout(8, 8, None, false));
// Niche optimization: `Option<&()>` has the same layout as `usize`.
test!(Option<&'static ()>, usize::LAYOUT);
test!([()], layout(0, 1, Some(0), true));
test!([u8], layout(0, 1, Some(1), true));
test!(str, layout(0, 1, Some(1), true));
}
// Exhaustively checks `#[derive(KnownLayout)]` against expected layouts for
// sized and slice-DST structs under default, `align(N)`, `packed`, and
// `packed(N)` reprs, plus generic-parameter cases.
#[cfg(feature = "derive")]
#[test]
fn test_known_layout_derive() {
// A type that deliberately does NOT implement `KnownLayout`, used to show
// the derive only needs `KnownLayout` on the trailing field.
struct NotKnownLayout<T = ()> {
_t: T,
}
// Helper with configurable alignment and size via const generics.
#[derive(KnownLayout)]
#[repr(C)]
struct AlignSize<const ALIGN: usize, const SIZE: usize>
where
elain::Align<ALIGN>: elain::Alignment,
{
_align: elain::Align<ALIGN>,
size: [u8; SIZE],
}
type AU16 = AlignSize<2, 2>;
type AU32 = AlignSize<4, 4>;
fn _assert_kl<T: ?Sized + KnownLayout>(_: &T) {}
// Expected-layout constructors for the assertions below.
let sized_layout = |align, size| DstLayout {
align: NonZeroUsize::new(align).unwrap(),
size_info: SizeInfo::Sized { size },
statically_shallow_unpadded: false,
};
let unsized_layout = |align, elem_size, offset, statically_shallow_unpadded| DstLayout {
align: NonZeroUsize::new(align).unwrap(),
size_info: SizeInfo::SliceDst(TrailingSliceLayout { offset, elem_size }),
statically_shallow_unpadded,
};
// KL01*: sized structs whose fields are not `KnownLayout`.
#[allow(dead_code)]
#[derive(KnownLayout)]
struct KL01(NotKnownLayout<AU32>, NotKnownLayout<AU16>);
let expected = DstLayout::for_type::<KL01>();
assert_eq!(<KL01 as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL01 as KnownLayout>::LAYOUT, sized_layout(4, 8));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(align(64))]
struct KL01Align(NotKnownLayout<AU32>, NotKnownLayout<AU16>);
let expected = DstLayout::for_type::<KL01Align>();
assert_eq!(<KL01Align as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL01Align as KnownLayout>::LAYOUT, sized_layout(64, 64));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(packed)]
struct KL01Packed(NotKnownLayout<AU32>, NotKnownLayout<AU16>);
let expected = DstLayout::for_type::<KL01Packed>();
assert_eq!(<KL01Packed as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL01Packed as KnownLayout>::LAYOUT, sized_layout(1, 6));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(packed(2))]
struct KL01PackedN(NotKnownLayout<AU32>, NotKnownLayout<AU16>);
assert_impl_all!(KL01PackedN: KnownLayout);
let expected = DstLayout::for_type::<KL01PackedN>();
assert_eq!(<KL01PackedN as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL01PackedN as KnownLayout>::LAYOUT, sized_layout(2, 6));
// KL03*: sized structs mixing non-`KnownLayout` and primitive fields.
#[allow(dead_code)]
#[derive(KnownLayout)]
struct KL03(NotKnownLayout, u8);
let expected = DstLayout::for_type::<KL03>();
assert_eq!(<KL03 as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL03 as KnownLayout>::LAYOUT, sized_layout(1, 1));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(align(64))]
struct KL03Align(NotKnownLayout<AU32>, u8);
let expected = DstLayout::for_type::<KL03Align>();
assert_eq!(<KL03Align as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL03Align as KnownLayout>::LAYOUT, sized_layout(64, 64));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(packed)]
struct KL03Packed(NotKnownLayout<AU32>, u8);
let expected = DstLayout::for_type::<KL03Packed>();
assert_eq!(<KL03Packed as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL03Packed as KnownLayout>::LAYOUT, sized_layout(1, 5));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(packed(2))]
struct KL03PackedN(NotKnownLayout<AU32>, u8);
assert_impl_all!(KL03PackedN: KnownLayout);
let expected = DstLayout::for_type::<KL03PackedN>();
assert_eq!(<KL03PackedN as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL03PackedN as KnownLayout>::LAYOUT, sized_layout(2, 6));
// KL05/KL07: the derive works for generic sized structs.
#[allow(dead_code)]
#[derive(KnownLayout)]
struct KL05<T>(u8, T);
fn _test_kl05<T>(t: T) -> impl KnownLayout {
KL05(0u8, t)
}
#[allow(dead_code)]
#[derive(KnownLayout)]
struct KL07<T: KnownLayout>(u8, T);
fn _test_kl07<T: KnownLayout>(t: T) -> impl KnownLayout {
let _ = KL07(0u8, t);
}
// KL10*: slice DSTs with a trailing `[u8]` under various reprs; expected
// layouts are built with the same `extend`/`pad_to_align` machinery the
// derive uses, then cross-checked against concrete numbers.
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C)]
struct KL10(NotKnownLayout<AU32>, [u8]);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), None)
.extend(<[u8] as KnownLayout>::LAYOUT, None)
.pad_to_align();
assert_eq!(<KL10 as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL10 as KnownLayout>::LAYOUT, unsized_layout(4, 1, 4, false));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, align(64))]
struct KL10Align(NotKnownLayout<AU32>, [u8]);
let repr_align = NonZeroUsize::new(64);
let expected = DstLayout::new_zst(repr_align)
.extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), None)
.extend(<[u8] as KnownLayout>::LAYOUT, None)
.pad_to_align();
assert_eq!(<KL10Align as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL10Align as KnownLayout>::LAYOUT, unsized_layout(64, 1, 4, false));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, packed)]
struct KL10Packed(NotKnownLayout<AU32>, [u8]);
let repr_packed = NonZeroUsize::new(1);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), repr_packed)
.extend(<[u8] as KnownLayout>::LAYOUT, repr_packed)
.pad_to_align();
assert_eq!(<KL10Packed as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL10Packed as KnownLayout>::LAYOUT, unsized_layout(1, 1, 4, false));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, packed(2))]
struct KL10PackedN(NotKnownLayout<AU32>, [u8]);
let repr_packed = NonZeroUsize::new(2);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU32>>(), repr_packed)
.extend(<[u8] as KnownLayout>::LAYOUT, repr_packed)
.pad_to_align();
assert_eq!(<KL10PackedN as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL10PackedN as KnownLayout>::LAYOUT, unsized_layout(2, 1, 4, false));
// KL11*: sized structs with trailing padding under various reprs.
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C)]
struct KL11(NotKnownLayout<AU64>, u8);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), None)
.extend(<u8 as KnownLayout>::LAYOUT, None)
.pad_to_align();
assert_eq!(<KL11 as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL11 as KnownLayout>::LAYOUT, sized_layout(8, 16));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, align(64))]
struct KL11Align(NotKnownLayout<AU64>, u8);
let repr_align = NonZeroUsize::new(64);
let expected = DstLayout::new_zst(repr_align)
.extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), None)
.extend(<u8 as KnownLayout>::LAYOUT, None)
.pad_to_align();
assert_eq!(<KL11Align as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL11Align as KnownLayout>::LAYOUT, sized_layout(64, 64));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, packed)]
struct KL11Packed(NotKnownLayout<AU64>, u8);
let repr_packed = NonZeroUsize::new(1);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), repr_packed)
.extend(<u8 as KnownLayout>::LAYOUT, repr_packed)
.pad_to_align();
assert_eq!(<KL11Packed as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL11Packed as KnownLayout>::LAYOUT, sized_layout(1, 9));
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C, packed(2))]
struct KL11PackedN(NotKnownLayout<AU64>, u8);
let repr_packed = NonZeroUsize::new(2);
let expected = DstLayout::new_zst(None)
.extend(DstLayout::for_type::<NotKnownLayout<AU64>>(), repr_packed)
.extend(<u8 as KnownLayout>::LAYOUT, repr_packed)
.pad_to_align();
assert_eq!(<KL11PackedN as KnownLayout>::LAYOUT, expected);
assert_eq!(<KL11PackedN as KnownLayout>::LAYOUT, sized_layout(2, 10));
// KL14/KL15: generic trailing fields, including `?Sized` ones.
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C)]
struct KL14<T: ?Sized + KnownLayout>(u8, T);
fn _test_kl14<T: ?Sized + KnownLayout>(kl: &KL14<T>) {
_assert_kl(kl)
}
#[allow(dead_code)]
#[derive(KnownLayout)]
#[repr(C)]
struct KL15<T: KnownLayout>(u8, T);
fn _test_kl15<T: KnownLayout>(t: T) -> impl KnownLayout {
let _ = KL15(0u8, t);
}
// KLTU: matrix of leading/trailing field combinations, sized and unsized.
#[allow(clippy::upper_case_acronyms, dead_code)]
#[derive(KnownLayout)]
#[repr(C)]
struct KLTU<T, U: ?Sized>(T, U);
assert_eq!(<KLTU<(), ()> as KnownLayout>::LAYOUT, sized_layout(1, 0));
assert_eq!(<KLTU<(), u8> as KnownLayout>::LAYOUT, sized_layout(1, 1));
assert_eq!(<KLTU<(), AU16> as KnownLayout>::LAYOUT, sized_layout(2, 2));
assert_eq!(<KLTU<(), [()]> as KnownLayout>::LAYOUT, unsized_layout(1, 0, 0, false));
assert_eq!(<KLTU<(), [u8]> as KnownLayout>::LAYOUT, unsized_layout(1, 1, 0, false));
assert_eq!(<KLTU<(), [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 0, false));
assert_eq!(<KLTU<u8, ()> as KnownLayout>::LAYOUT, sized_layout(1, 1));
assert_eq!(<KLTU<u8, u8> as KnownLayout>::LAYOUT, sized_layout(1, 2));
assert_eq!(<KLTU<u8, AU16> as KnownLayout>::LAYOUT, sized_layout(2, 4));
assert_eq!(<KLTU<u8, [()]> as KnownLayout>::LAYOUT, unsized_layout(1, 0, 1, false));
assert_eq!(<KLTU<u8, [u8]> as KnownLayout>::LAYOUT, unsized_layout(1, 1, 1, false));
assert_eq!(<KLTU<u8, [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 2, false));
assert_eq!(<KLTU<AU16, ()> as KnownLayout>::LAYOUT, sized_layout(2, 2));
assert_eq!(<KLTU<AU16, u8> as KnownLayout>::LAYOUT, sized_layout(2, 4));
assert_eq!(<KLTU<AU16, AU16> as KnownLayout>::LAYOUT, sized_layout(2, 4));
assert_eq!(<KLTU<AU16, [()]> as KnownLayout>::LAYOUT, unsized_layout(2, 0, 2, false));
assert_eq!(<KLTU<AU16, [u8]> as KnownLayout>::LAYOUT, unsized_layout(2, 1, 2, false));
assert_eq!(<KLTU<AU16, [AU16]> as KnownLayout>::LAYOUT, unsized_layout(2, 2, 2, false));
// KLF0-4: increasing numbers of leading fields before a trailing `[u8]`.
#[derive(KnownLayout)]
#[repr(C)]
struct KLF0;
assert_eq!(<KLF0 as KnownLayout>::LAYOUT, sized_layout(1, 0));
#[derive(KnownLayout)]
#[repr(C)]
struct KLF1([u8]);
assert_eq!(<KLF1 as KnownLayout>::LAYOUT, unsized_layout(1, 1, 0, true));
#[derive(KnownLayout)]
#[repr(C)]
struct KLF2(NotKnownLayout<u8>, [u8]);
assert_eq!(<KLF2 as KnownLayout>::LAYOUT, unsized_layout(1, 1, 1, false));
#[derive(KnownLayout)]
#[repr(C)]
struct KLF3(NotKnownLayout<u8>, NotKnownLayout<AU16>, [u8]);
assert_eq!(<KLF3 as KnownLayout>::LAYOUT, unsized_layout(2, 1, 4, false));
#[derive(KnownLayout)]
#[repr(C)]
struct KLF4(NotKnownLayout<u8>, NotKnownLayout<AU16>, NotKnownLayout<AU32>, [u8]);
assert_eq!(<KLF4 as KnownLayout>::LAYOUT, unsized_layout(4, 1, 8, false));
}
// Compile-time check that the marker traits are object safe (usable as
// `dyn Trait`); the functions only need to type-check, not run.
#[test]
fn test_object_safety() {
fn _takes_no_cell(_: &dyn Immutable) {}
fn _takes_unaligned(_: &dyn Unaligned) {}
}
// Exercises types that are `FromZeros` but not `FromBytes` (`bool`, `char`,
// `str`): zeroed construction, boxed/vec constructors, and in-place `zero`.
#[test]
fn test_from_zeros_only() {
assert!(!bool::new_zeroed());
assert_eq!(char::new_zeroed(), '\0');
#[cfg(feature = "alloc")]
{
assert_eq!(bool::new_box_zeroed(), Ok(Box::new(false)));
assert_eq!(char::new_box_zeroed(), Ok(Box::new('\0')));
assert_eq!(
<[bool]>::new_box_zeroed_with_elems(3).unwrap().as_ref(),
[false, false, false]
);
assert_eq!(
<[char]>::new_box_zeroed_with_elems(3).unwrap().as_ref(),
['\0', '\0', '\0']
);
assert_eq!(bool::new_vec_zeroed(3).unwrap().as_ref(), [false, false, false]);
assert_eq!(char::new_vec_zeroed(3).unwrap().as_ref(), ['\0', '\0', '\0']);
}
// `str::zero` overwrites the buffer in place with NUL bytes, which is
// still valid UTF-8.
let mut string = "hello".to_string();
let s: &mut str = string.as_mut();
assert_eq!(s, "hello");
s.zero();
assert_eq!(s, "\0\0\0\0\0");
}
// Regression test: constructors and conversions of ZST slices must preserve
// the requested element count even though the byte length is always zero.
#[test]
fn test_zst_count_preserved() {
#[cfg(feature = "alloc")]
assert_eq!(<[()]>::new_box_zeroed_with_elems(3).unwrap().len(), 3);
#[cfg(feature = "alloc")]
assert_eq!(<()>::new_vec_zeroed(3).unwrap().len(), 3);
assert_eq!(<[()]>::ref_from_bytes_with_elems(&[][..], 3).unwrap().len(), 3);
assert_eq!(<[()]>::ref_from_prefix_with_elems(&[][..], 3).unwrap().0.len(), 3);
assert_eq!(<[()]>::ref_from_suffix_with_elems(&[][..], 3).unwrap().1.len(), 3);
assert_eq!(<[()]>::mut_from_bytes_with_elems(&mut [][..], 3).unwrap().len(), 3);
assert_eq!(<[()]>::mut_from_prefix_with_elems(&mut [][..], 3).unwrap().0.len(), 3);
assert_eq!(<[()]>::mut_from_suffix_with_elems(&mut [][..], 3).unwrap().1.len(), 3);
}
// Round-trips a `u64` through `read_from_{bytes,prefix,suffix}` and
// `write_to{,_prefix,_suffix}`, using native-endian byte constants so the
// test is endian-agnostic.
#[test]
fn test_read_write() {
const VAL: u64 = 0x12345678;
#[cfg(target_endian = "big")]
const VAL_BYTES: [u8; 8] = VAL.to_be_bytes();
#[cfg(target_endian = "little")]
const VAL_BYTES: [u8; 8] = VAL.to_le_bytes();
const ZEROS: [u8; 8] = [0u8; 8];
assert_eq!(u64::read_from_bytes(&VAL_BYTES[..]), Ok(VAL));
// Prefix/suffix reads return the value plus the unconsumed bytes.
let bytes_with_prefix: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]);
assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Ok((VAL, &ZEROS[..])));
assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Ok((&VAL_BYTES[..], 0)));
let bytes_with_suffix: [u8; 16] = transmute!([[0; 8], VAL_BYTES]);
assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Ok((0, &VAL_BYTES[..])));
assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Ok((&ZEROS[..], VAL)));
let mut bytes = [0u8; 8];
assert_eq!(VAL.write_to(&mut bytes[..]), Ok(()));
assert_eq!(bytes, VAL_BYTES);
let mut bytes = [0u8; 16];
assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Ok(()));
let want: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]);
assert_eq!(bytes, want);
let mut bytes = [0u8; 16];
assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Ok(()));
let want: [u8; 16] = transmute!([[0; 8], VAL_BYTES]);
assert_eq!(bytes, want);
}
// Soundness regression test: a malicious `Read` impl that itself reads the
// buffer it is given must not be able to observe uninitialized padding bytes
// of `WithPadding`. The impl zeroes anything nonzero it sees, so the test
// only passes if the buffer handed to `read` was fully initialized.
#[test]
#[cfg(feature = "std")]
fn test_read_io_with_padding_soundness() {
#[derive(FromBytes)]
#[repr(C)]
struct WithPadding {
x: u8,
y: u16,
}
struct ReadsInRead;
impl std::io::Read for ReadsInRead {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
// This body branches on the buffer's contents, which would be UB
// if `read_from_io` passed uninitialized memory.
if buf.iter().all(|&x| x == 0) {
Ok(buf.len())
} else {
buf.iter_mut().for_each(|x| *x = 0);
Ok(buf.len())
}
}
}
assert!(matches!(WithPadding::read_from_io(ReadsInRead), Ok(WithPadding { x: 0, y: 0 })));
}
// Checks `write_to_io`/`read_from_io` success on adequately sized buffers and
// error propagation on short ones (partial writes may still occur).
#[test]
#[cfg(feature = "std")]
fn test_read_write_io() {
let mut long_buffer = [0, 0, 0, 0];
assert!(matches!(u16::MAX.write_to_io(&mut long_buffer[..]), Ok(())));
assert_eq!(long_buffer, [255, 255, 0, 0]);
assert!(matches!(u16::read_from_io(&long_buffer[..]), Ok(u16::MAX)));
let mut short_buffer = [0, 0];
assert!(u32::MAX.write_to_io(&mut short_buffer[..]).is_err());
// `write_all` wrote what it could before failing.
assert_eq!(short_buffer, [255, 255]);
assert!(u32::read_from_io(&short_buffer[..]).is_err());
}
// Exercises `TryFromBytes::try_read_from_{bytes,prefix,suffix}`: valid and
// invalid `bool` bit patterns, size errors, and — via `AU64` on unaligned
// input — that the by-value read path has no alignment requirement.
#[test]
fn test_try_from_bytes_try_read_from() {
assert_eq!(<bool as TryFromBytes>::try_read_from_bytes(&[0]), Ok(false));
assert_eq!(<bool as TryFromBytes>::try_read_from_bytes(&[1]), Ok(true));
assert_eq!(<bool as TryFromBytes>::try_read_from_prefix(&[0, 2]), Ok((false, &[2][..])));
assert_eq!(<bool as TryFromBytes>::try_read_from_prefix(&[1, 2]), Ok((true, &[2][..])));
assert_eq!(<bool as TryFromBytes>::try_read_from_suffix(&[2, 0]), Ok((&[2][..], false)));
assert_eq!(<bool as TryFromBytes>::try_read_from_suffix(&[2, 1]), Ok((&[2][..], true)));
// Too few bytes: size error.
assert!(matches!(
<u8 as TryFromBytes>::try_read_from_bytes(&[]),
Err(TryReadError::Size(_))
));
assert!(matches!(
<u8 as TryFromBytes>::try_read_from_prefix(&[]),
Err(TryReadError::Size(_))
));
assert!(matches!(
<u8 as TryFromBytes>::try_read_from_suffix(&[]),
Err(TryReadError::Size(_))
));
// Too many bytes is also a size error for the exact-size variant.
assert!(matches!(
<u8 as TryFromBytes>::try_read_from_bytes(&[0, 0]),
Err(TryReadError::Size(_))
));
// `2` is not a valid `bool` bit pattern: validity error.
assert!(matches!(
<bool as TryFromBytes>::try_read_from_bytes(&[2]),
Err(TryReadError::Validity(_))
));
assert!(matches!(
<bool as TryFromBytes>::try_read_from_prefix(&[2, 0]),
Err(TryReadError::Validity(_))
));
assert!(matches!(
<bool as TryFromBytes>::try_read_from_suffix(&[0, 2]),
Err(TryReadError::Validity(_))
));
// The `[1..9]` slices are deliberately misaligned for `AU64`; by-value
// reads must still succeed.
let bytes: [u8; 9] = [0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(<AU64 as TryFromBytes>::try_read_from_bytes(&bytes[..8]), Ok(AU64(0)));
assert_eq!(<AU64 as TryFromBytes>::try_read_from_bytes(&bytes[1..9]), Ok(AU64(0)));
assert_eq!(
<AU64 as TryFromBytes>::try_read_from_prefix(&bytes[..8]),
Ok((AU64(0), &[][..]))
);
assert_eq!(
<AU64 as TryFromBytes>::try_read_from_prefix(&bytes[1..9]),
Ok((AU64(0), &[][..]))
);
assert_eq!(
<AU64 as TryFromBytes>::try_read_from_suffix(&bytes[..8]),
Ok((&[][..], AU64(0)))
);
assert_eq!(
<AU64 as TryFromBytes>::try_read_from_suffix(&bytes[1..9]),
Ok((&[][..], AU64(0)))
);
}
// Success paths for `ref_from_*`/`mut_from_*` on an aligned 16-byte buffer:
// each mutation through a returned reference is verified by a subsequent
// read of the shared buffer.
#[test]
fn test_ref_from_mut_from() {
// `Align<_, AU64>` guarantees 8-byte alignment for the inner array.
let mut buf =
Align::<[u8; 16], AU64>::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
assert_eq!(
AU64::ref_from_bytes(&buf.t[8..]).unwrap().0.to_ne_bytes(),
[8, 9, 10, 11, 12, 13, 14, 15]
);
let suffix = AU64::mut_from_bytes(&mut buf.t[8..]).unwrap();
suffix.0 = 0x0101010101010101;
assert_eq!(
<[u8; 9]>::ref_from_suffix(&buf.t[..]).unwrap(),
(&[0, 1, 2, 3, 4, 5, 6][..], &[7u8, 1, 1, 1, 1, 1, 1, 1, 1])
);
let (prefix, suffix) = AU64::mut_from_suffix(&mut buf.t[1..]).unwrap();
assert_eq!(prefix, &mut [1u8, 2, 3, 4, 5, 6, 7][..]);
suffix.0 = 0x0202020202020202;
let (prefix, suffix) = <[u8; 10]>::mut_from_suffix(&mut buf.t[..]).unwrap();
assert_eq!(prefix, &mut [0u8, 1, 2, 3, 4, 5][..]);
suffix[0] = 42;
assert_eq!(
<[u8; 9]>::ref_from_prefix(&buf.t[..]).unwrap(),
(&[0u8, 1, 2, 3, 4, 5, 42, 7, 2], &[2u8, 2, 2, 2, 2, 2, 2][..])
);
<[u8; 2]>::mut_from_prefix(&mut buf.t[..]).unwrap().0[1] = 30;
assert_eq!(buf.t, [0, 30, 2, 3, 4, 5, 42, 7, 2, 2, 2, 2, 2, 2, 2, 2]);
}
// Error paths for `ref_from_*`/`mut_from_*`: buffers that are too large for
// exact-size conversions, too small, or misaligned must all fail.
#[test]
fn test_ref_from_mut_from_error() {
// 16 bytes is too many for the exact-size 8-byte conversions.
let mut buf = Align::<[u8; 16], AU64>::default();
assert!(AU64::ref_from_bytes(&buf.t[..]).is_err());
assert!(AU64::mut_from_bytes(&mut buf.t[..]).is_err());
assert!(<[u8; 8]>::ref_from_bytes(&buf.t[..]).is_err());
assert!(<[u8; 8]>::mut_from_bytes(&mut buf.t[..]).is_err());
// 4 bytes is too few for every 8-byte conversion.
let mut buf = Align::<[u8; 4], AU64>::default();
assert!(AU64::ref_from_bytes(&buf.t[..]).is_err());
assert!(AU64::mut_from_bytes(&mut buf.t[..]).is_err());
assert!(<[u8; 8]>::ref_from_bytes(&buf.t[..]).is_err());
assert!(<[u8; 8]>::mut_from_bytes(&mut buf.t[..]).is_err());
assert!(AU64::ref_from_prefix(&buf.t[..]).is_err());
assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_err());
assert!(AU64::ref_from_suffix(&buf.t[..]).is_err());
assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err());
assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_err());
assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_err());
assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_err());
assert!(<[u8; 8]>::mut_from_suffix(&mut buf.t[..]).is_err());
// Slicing `[1..]` misaligns the data for `AU64`; `[u8; 13]` makes the
// suffix of the unsliced buffer misaligned too.
let mut buf = Align::<[u8; 13], AU64>::default();
assert!(AU64::ref_from_bytes(&buf.t[1..]).is_err());
assert!(AU64::mut_from_bytes(&mut buf.t[1..]).is_err());
assert!(AU64::ref_from_bytes(&buf.t[1..]).is_err());
assert!(AU64::mut_from_bytes(&mut buf.t[1..]).is_err());
assert!(AU64::ref_from_prefix(&buf.t[1..]).is_err());
assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_err());
assert!(AU64::ref_from_suffix(&buf.t[..]).is_err());
assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err());
}
// Exercises the `IntoBytes` methods (`as_bytes`, `as_mut_bytes`, `write_to*`)
// on both a sized struct and an unsized type, via a generic helper
// parameterized by the expected byte length `N`.
#[test]
fn test_to_methods() {
/// Tests a value's `IntoBytes` methods.
///
/// `t` is the value under test; `bytes` its expected byte serialization;
/// `post_mutation` the expected value after XOR-flipping byte 0 through
/// `as_mut_bytes` (flipped back before the write tests run).
fn test<T: FromBytes + IntoBytes + Immutable + Debug + Eq + ?Sized, const N: usize>(
t: &mut T,
bytes: &[u8],
post_mutation: &T,
) {
assert_eq!(t.as_bytes(), bytes);
t.as_mut_bytes()[0] ^= 0xFF;
assert_eq!(t, post_mutation);
t.as_mut_bytes()[0] ^= 0xFF;
// `write_to` requires an exact-size destination: both shorter and
// longer buffers must fail.
assert!(t.write_to(&mut vec![0; N - 1][..]).is_err());
assert!(t.write_to(&mut vec![0; N + 1][..]).is_err());
let mut bytes = [0; N];
assert_eq!(t.write_to(&mut bytes[..]), Ok(()));
assert_eq!(bytes, t.as_bytes());
assert!(t.write_to_prefix(&mut vec![0; N - 1][..]).is_err());
let mut bytes = [0; N];
assert_eq!(t.write_to_prefix(&mut bytes[..]), Ok(()));
assert_eq!(bytes, t.as_bytes());
// `write_to_prefix` must leave trailing bytes untouched.
let mut too_many_bytes = vec![0; N + 1];
too_many_bytes[N] = 123;
assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Ok(()));
assert_eq!(&too_many_bytes[..N], t.as_bytes());
assert_eq!(too_many_bytes[N], 123);
assert!(t.write_to_suffix(&mut vec![0; N - 1][..]).is_err());
let mut bytes = [0; N];
assert_eq!(t.write_to_suffix(&mut bytes[..]), Ok(()));
assert_eq!(bytes, t.as_bytes());
// `write_to_suffix` must leave leading bytes untouched.
let mut too_many_bytes = vec![0; N + 1];
too_many_bytes[0] = 123;
assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Ok(()));
assert_eq!(&too_many_bytes[1..], t.as_bytes());
assert_eq!(too_many_bytes[0], 123);
}
#[derive(Debug, Eq, PartialEq, FromBytes, IntoBytes, Immutable)]
#[repr(C)]
struct Foo {
a: u32,
b: Wrapping<u32>,
c: Option<NonZeroU32>,
}
let expected_bytes: Vec<u8> = if cfg!(target_endian = "little") {
vec![1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0]
} else {
vec![0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0]
};
// Flipping byte 0 flips either the low or the high byte of `a`,
// depending on endianness.
let post_mutation_expected_a =
if cfg!(target_endian = "little") { 0x00_00_00_FE } else { 0xFF_00_00_01 };
test::<_, 12>(
&mut Foo { a: 1, b: Wrapping(2), c: None },
expected_bytes.as_bytes(),
&Foo { a: post_mutation_expected_a, b: Wrapping(2), c: None },
);
test::<_, 3>(
Unsized::from_mut_slice(&mut [1, 2, 3]),
&[1, 2, 3],
Unsized::from_mut_slice(&mut [0xFE, 2, 3]),
);
}
// `as_bytes` on a struct containing an odd-length array field.
#[test]
fn test_array() {
#[derive(FromBytes, IntoBytes, Immutable)]
#[repr(C)]
struct Foo {
a: [u16; 33],
}
let foo = Foo { a: [0xFFFF; 33] };
let expected = [0xFFu8; 66];
assert_eq!(foo.as_bytes(), &expected[..]);
}
// Basic `new_zeroed` smoke test, including the ZST case.
#[test]
fn test_new_zeroed() {
assert!(!bool::new_zeroed());
assert_eq!(u64::new_zeroed(), 0);
#[allow(clippy::unit_cmp)]
{
assert_eq!(<()>::new_zeroed(), ());
}
}
// Compile-time check that the derives see through `repr(transparent)` and
// `repr(C, packed)` on generic structs, propagating trait impls from the
// field types.
#[test]
fn test_transparent_packed_generic_struct() {
#[derive(IntoBytes, FromBytes, Unaligned)]
#[repr(transparent)]
#[allow(dead_code)] struct Foo<T> {
_t: T,
_phantom: PhantomData<()>,
}
assert_impl_all!(Foo<u32>: FromZeros, FromBytes, IntoBytes);
assert_impl_all!(Foo<u8>: Unaligned);
#[derive(IntoBytes, FromBytes, Unaligned)]
#[repr(C, packed)]
#[allow(dead_code)] struct Bar<T, U> {
_t: T,
_u: U,
}
assert_impl_all!(Bar<u8, AU64>: FromZeros, FromBytes, IntoBytes, Unaligned);
}
// Tests for the `alloc`-gated `FromZeros` constructors and `Vec` helpers.
#[cfg(feature = "alloc")]
mod alloc {
use super::*;
// Appending zeroed elements to non-empty and empty vectors.
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[test]
fn test_extend_vec_zeroed() {
let mut v = vec![100u16, 200, 300];
FromZeros::extend_vec_zeroed(&mut v, 3).unwrap();
assert_eq!(v.len(), 6);
assert_eq!(&*v, &[100, 200, 300, 0, 0, 0]);
drop(v);
let mut v: Vec<u64> = Vec::new();
FromZeros::extend_vec_zeroed(&mut v, 3).unwrap();
assert_eq!(v.len(), 3);
assert_eq!(&*v, &[0, 0, 0]);
drop(v);
}
// Same, for a zero-sized element type (length bookkeeping only).
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[test]
fn test_extend_vec_zeroed_zst() {
let mut v = vec![(), (), ()];
<()>::extend_vec_zeroed(&mut v, 3).unwrap();
assert_eq!(v.len(), 6);
assert_eq!(&*v, &[(), (), (), (), (), ()]);
drop(v);
let mut v: Vec<()> = Vec::new();
<()>::extend_vec_zeroed(&mut v, 3).unwrap();
assert_eq!(&*v, &[(), (), ()]);
drop(v);
}
// Inserting zeroed elements at the start, middle, and end.
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[test]
fn test_insert_vec_zeroed() {
let mut v: Vec<u64> = Vec::new();
u64::insert_vec_zeroed(&mut v, 0, 2).unwrap();
assert_eq!(v.len(), 2);
assert_eq!(&*v, &[0, 0]);
drop(v);
let mut v = vec![100u64, 200, 300];
u64::insert_vec_zeroed(&mut v, 0, 2).unwrap();
assert_eq!(v.len(), 5);
assert_eq!(&*v, &[0, 0, 100, 200, 300]);
drop(v);
let mut v = vec![100u64, 200, 300];
u64::insert_vec_zeroed(&mut v, 1, 1).unwrap();
assert_eq!(v.len(), 4);
assert_eq!(&*v, &[100, 0, 200, 300]);
drop(v);
let mut v = vec![100u64, 200, 300];
u64::insert_vec_zeroed(&mut v, 3, 1).unwrap();
assert_eq!(v.len(), 4);
assert_eq!(&*v, &[100, 200, 300, 0]);
drop(v);
}
// Same, for a zero-sized element type.
#[cfg(zerocopy_panic_in_const_and_vec_try_reserve_1_57_0)]
#[test]
fn test_insert_vec_zeroed_zst() {
let mut v: Vec<()> = Vec::new();
<()>::insert_vec_zeroed(&mut v, 0, 2).unwrap();
assert_eq!(v.len(), 2);
assert_eq!(&*v, &[(), ()]);
drop(v);
let mut v = vec![(), (), ()];
<()>::insert_vec_zeroed(&mut v, 0, 2).unwrap();
assert_eq!(v.len(), 5);
assert_eq!(&*v, &[(), (), (), (), ()]);
drop(v);
let mut v = vec![(), (), ()];
<()>::insert_vec_zeroed(&mut v, 1, 1).unwrap();
assert_eq!(v.len(), 4);
assert_eq!(&*v, &[(), (), (), ()]);
drop(v);
let mut v = vec![(), (), ()];
<()>::insert_vec_zeroed(&mut v, 3, 1).unwrap();
assert_eq!(v.len(), 4);
assert_eq!(&*v, &[(), (), (), ()]);
drop(v);
}
#[test]
fn test_new_box_zeroed() {
assert_eq!(u64::new_box_zeroed(), Ok(Box::new(0)));
}
// A large array allocation; only checks that construction and drop work.
#[test]
fn test_new_box_zeroed_array() {
drop(<[u32; 0x1000]>::new_box_zeroed());
}
#[test]
fn test_new_box_zeroed_zst() {
#[allow(clippy::unit_cmp)]
{
assert_eq!(<()>::new_box_zeroed(), Ok(Box::new(())));
}
}
#[test]
fn test_new_box_zeroed_with_elems() {
let mut s: Box<[u64]> = <[u64]>::new_box_zeroed_with_elems(3).unwrap();
assert_eq!(s.len(), 3);
assert_eq!(&*s, &[0, 0, 0]);
// The box is genuinely writable.
s[1] = 3;
assert_eq!(&*s, &[0, 3, 0]);
}
#[test]
fn test_new_box_zeroed_with_elems_empty() {
let s: Box<[u64]> = <[u64]>::new_box_zeroed_with_elems(0).unwrap();
assert_eq!(s.len(), 0);
}
// ZST slices: length must be preserved and indexing bounds respected.
#[test]
fn test_new_box_zeroed_with_elems_zst() {
let mut s: Box<[()]> = <[()]>::new_box_zeroed_with_elems(3).unwrap();
assert_eq!(s.len(), 3);
assert!(s.get(10).is_none());
#[allow(clippy::unit_cmp)]
{
assert_eq!(s[1], ());
}
s[2] = ();
}
#[test]
fn test_new_box_zeroed_with_elems_zst_empty() {
let s: Box<[()]> = <[()]>::new_box_zeroed_with_elems(0).unwrap();
assert_eq!(s.len(), 0);
}
// Requests whose byte size would overflow `usize` or exceed `isize::MAX`
// must fail with `AllocError` rather than panic or wrap.
#[test]
fn new_box_zeroed_with_elems_errors() {
assert_eq!(<[u16]>::new_box_zeroed_with_elems(usize::MAX), Err(AllocError));
let max = <usize as core::convert::TryFrom<_>>::try_from(isize::MAX).unwrap();
assert_eq!(
<[u16]>::new_box_zeroed_with_elems((max / mem::size_of::<u16>()) + 1),
Err(AllocError)
);
}
}
}