//! Utilities for safely re-interpreting `&[u8]` bytes as custom structs
//! and back without copying, for efficiently reading structured binary data.
//!
//! # Example
//!
//! Reading bytes:
//!
//! ```
//! use bytes_cast::{BytesCast, unaligned};
//!
//! #[derive(BytesCast)]
//! #[repr(C)]
//! struct Foo {
//!     bar: [u8; 2],
//!     baz: unaligned::U32Be,
//! }
//!
//! let input = &[1_u8, 2, 3, 4, 5, 6, 7, 8];
//!
//! let (foo, rest) = Foo::from_bytes(input).unwrap();
//! assert_eq!(foo.bar, [1_u8, 2]);
//! assert_eq!(foo.baz.get(), 0x0304_0506_u32);
//! assert_eq!(rest, &[7_u8, 8]);
//!
//! assert!(<[Foo; 2]>::from_bytes(input).is_err()); // input is too short
//!
//! let (values, rest) = unaligned::U16Le::slice_from_bytes(input, 2).unwrap();
//! assert_eq!(values.len(), 2);
//! assert_eq!(values[0].get(), 0x02_01_u16);
//! assert_eq!(values[1].get(), 0x04_03_u16);
//! assert_eq!(rest, &[5_u8, 6, 7, 8]);
//!
//! assert!(unaligned::U16Le::slice_from_bytes(input, 5).is_err()); // input is too short
//! ```
//!
//! Writing bytes:
//!
//! ```
//! # use bytes_cast::{BytesCast, unaligned};
//! # #[derive(BytesCast)]
//! # #[repr(C)]
//! # struct Foo {
//! #     bar: [u8; 2],
//! #     baz: unaligned::U32Be,
//! # }
//!
//! let foo = Foo { bar: [1, 2], baz: 0x0304_0506.into() };
//! assert_eq!(foo.as_bytes(), &[1_u8, 2, 3, 4, 5, 6]);
//!
//! let slice: &[unaligned::U16Le] = &[0x02_01.into(), 0x04_03.into()];
//! assert_eq!(slice.as_bytes(), &[1_u8, 2, 3, 4]);
//! ```
#![no_std]
pub use bytes_cast_derive::BytesCast;
use core::fmt;
use core::mem;
use core::slice;
pub mod unaligned;
#[cfg(doctest)]
mod compile_fail_tests;
/// Marks a type as safe to interpret from and to bytes without copying.
///
/// # Safety
///
/// For a type to implement this trait:
///
/// * All initialized bit patterns must be valid. (This excludes `bool`,
///   enums, etc.)
/// * There must not be an alignment requirement. (`align_of() == 1`)
/// * There must be no padding or otherwise uninitialized bytes
///
/// # Deriving
///
/// Instead of writing `unsafe impl` blocks this trait should be derived.
/// `#[derive(BytesCast)]` on a type definition invokes a procedural macro
/// that implements the trait after checking that the type:
///
/// * Is a `struct`
/// * Is not generic
/// * Has a `#[repr(C)]` or `#[repr(transparent)]` attribute
/// * Has `align_of() == 1`
/// * Only has fields whose respective type implement `BytesCast`.
///
/// Failing any of these checks causes a compile-time error.
/// This excludes some types that could implement `BytesCast` without memory
/// safety issue:
///
/// * By choice: disabling field reordering with `repr` is not about memory
///   safety but making memory layout / field offsets predictable.
/// * By necessity: generics would make `align_of` potentially depend on type
///   parameters and not possible to statically check at the struct
///   definition site.
pub unsafe trait BytesCast {
    /// Interpret the start of the given slice of bytes as a reference to
    /// this type.
    ///
    /// If the given input is large enough, returns a tuple of the new
    /// reference and the remaining bytes.
    #[inline]
    fn from_bytes(bytes: &[u8]) -> Result<(&Self, &[u8]), FromBytesError>
    where
        Self: Sized,
    {
        let rest = remaining_bytes(bytes, mem::size_of::<Self>())?;
        // SAFETY: `remaining_bytes` just verified that `bytes` holds at
        // least `size_of::<Self>()` bytes. Together with the `BytesCast`
        // invariants (align 1, every bit pattern valid, no uninitialized
        // bytes) this makes the cast and dereference sound.
        let this = unsafe { &*(bytes.as_ptr() as *const Self) };
        Ok((this, rest))
    }
    /// Interpret the start of the given slice of bytes as a slice of this
    /// type.
    ///
    /// If the given input is large enough, returns a tuple of the new
    /// slice and the remaining bytes.
    #[inline]
    fn slice_from_bytes(bytes: &[u8], slice_len: usize) -> Result<(&[Self], &[u8]), FromBytesError>
    where
        Self: Sized,
    {
        // A multiplication overflow means the request can never be satisfied;
        // report it with `expected_len: None` to distinguish it from a
        // merely-too-short input.
        let expected_byte_len = match mem::size_of::<Self>().checked_mul(slice_len) {
            Some(len) => len,
            None => {
                return Err(FromBytesError {
                    input_len: bytes.len(),
                    expected_len: None,
                })
            }
        };
        let rest = remaining_bytes(bytes, expected_byte_len)?;
        // SAFETY: `remaining_bytes` just verified that `bytes` holds at
        // least `slice_len * size_of::<Self>()` bytes. Together with the
        // `BytesCast` invariants this makes the slice construction sound.
        let this = unsafe { slice::from_raw_parts(bytes.as_ptr() as *const Self, slice_len) };
        Ok((this, rest))
    }
    /// Interpret this value as the bytes of its memory representation.
    #[inline]
    fn as_bytes(&self) -> &[u8] {
        // SAFETY: the `BytesCast` invariants guarantee that every byte of
        // `self` is initialized, so viewing its memory as `&[u8]` is sound
        // by definition.
        unsafe {
            let ptr: *const Self = self;
            slice::from_raw_parts(ptr.cast::<u8>(), mem::size_of_val(self))
        }
    }
}
/// If the given slice is long enough, return the remaining bytes after the
/// given length.
#[inline]
fn remaining_bytes(bytes: &[u8], expected_byte_len: usize) -> Result<&[u8], FromBytesError> {
    // `get` with a `RangeFrom` returns `None` (instead of panicking) when
    // the start index exceeds the slice length.
    match bytes.get(expected_byte_len..) {
        Some(rest) => Ok(rest),
        None => Err(FromBytesError {
            input_len: bytes.len(),
            expected_len: Some(expected_byte_len),
        }),
    }
}
/// The error type for [`BytesCast::from_bytes`] and
/// [`BytesCast::slice_from_bytes`].
pub struct FromBytesError {
    // Number of bytes that would have been needed to satisfy the request.
    // `None` when computing that size overflowed `usize` in
    // `slice_from_bytes`.
    expected_len: Option<usize>,
    // Actual length of the input slice, reported in the error message.
    input_len: usize,
}
impl fmt::Display for FromBytesError {
    /// Formats a human-readable description of why the cast failed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.expected_len {
            Some(expected_len) => write!(
                f,
                "Expected at least {} bytes, got {}",
                expected_len, self.input_len
            ),
            // `expected_len` is `None` only when the requested byte size
            // overflowed `usize`.
            None => write!(f, "Expected byte size overflowed in slice_from_bytes"),
        }
    }
}
impl fmt::Debug for FromBytesError {
    // Delegate to `Display` so that `unwrap()`/`expect()` panics and
    // `{:?}` output show the human-readable message instead of opaque
    // struct fields. Delegating via `fmt` (rather than `write!("{}")`)
    // preserves any formatter flags the caller set.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
// SAFETY: `PhantomData` is zero-sized, so it has no bytes that could be
// uninitialized or invalid, and `align_of` is 1.
unsafe impl<T: ?Sized> BytesCast for core::marker::PhantomData<T> {}
// SAFETY: a slice stores its elements contiguously with no extra padding,
// so the `BytesCast` invariants of `T` carry over to `[T]`.
unsafe impl<T: BytesCast> BytesCast for [T] {}
// SAFETY: `()` is zero-sized; see `PhantomData` above.
unsafe impl BytesCast for () {}
// SAFETY: `u8` has size 1, alignment 1, and every bit pattern is valid.
unsafe impl BytesCast for u8 {}
// NOTE: We don’t implement BytesCast for tuples with 2 or more fields
// because they are subject to field reordering.
// Like with default-`repr` structs this is not a memory safety issue but still
// a footgun. Single-field tuples don’t have that problem but are much less
// useful in the first place.
// FIXME: Use const generics when we require Rust 1.51+
// Expands to one `BytesCast` impl per listed array length.
macro_rules! array_impls {
    ($($N: expr)+) => {
        $(
            // SAFETY: an array stores its elements contiguously with no
            // extra padding, so the invariants of `T` carry over to `[T; N]`.
            unsafe impl<T: BytesCast> BytesCast for [T; $N] {}
        )+
    };
}
array_impls!(
    0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
    17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
);