#![allow(clippy::manual_map)]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
use alloc::{borrow::ToOwned, string::String};
use core::{
borrow::Borrow,
convert::TryInto,
fmt::{Debug, Display},
hash::Hash,
marker::PhantomData,
ops::Deref,
};
#[cfg(feature = "std")]
use std::io::Write;
use ref_cast::RefCast;
pub mod aligned_bytes;
use offset::align_offset;
pub mod casting;
mod offset;
#[cfg(feature = "alloc")]
pub(crate) mod buf;
use aligned_bytes::{empty_aligned, AlignedSlice, AsAligned, A8};
use casting::{AlignOf, AllBitPatternsValid};
#[doc(hidden)]
pub use gvariant_macro::{define_gv as _define_gv, gv_type as _gv_type};
/// Type-level representation of a GVariant type string.
///
/// Instances are created with the `gv!` macro, which generates a `Marker`
/// whose `TYPESTR` is the given type string and whose `Type` is the matching
/// Rust representation.
pub trait Marker: Copy {
    /// The GVariant type string, e.g. `b"s"` or `b"a(is)"`.
    const TYPESTR: &'static [u8];
    /// The borrowed Rust type used to interpret data with this signature.
    type Type: Cast + ?Sized;
    /// Interpret `data` as `&Self::Type`.
    ///
    /// Infallible: out-of-spec data is mapped to default values by the
    /// underlying [`Cast::from_aligned_slice`].
    fn cast<'a>(&self, data: &'a AlignedSlice<<Self::Type as AlignOf>::AlignOf>) -> &'a Self::Type {
        Self::Type::from_aligned_slice(data)
    }
    /// Mutable counterpart of [`Marker::cast`]; fails if the slice has the
    /// wrong size for a fixed-size type.
    fn try_cast_mut(
        data: &mut AlignedSlice<<Self::Type as AlignOf>::AlignOf>,
    ) -> Result<&mut Self::Type, casting::WrongSize> {
        Self::Type::try_from_aligned_slice_mut(data)
    }
    /// Read all of `r` into an aligned buffer and deserialize it as an owned
    /// value of this marker's type.
    #[cfg(feature = "std")]
    fn deserialize(
        &self,
        mut r: impl std::io::Read,
    ) -> std::io::Result<<Self::Type as ToOwned>::Owned> {
        let mut buf = vec![];
        r.read_to_end(&mut buf)?;
        // Move into an aligned buffer so the cast is valid:
        let buf: buf::AlignedBuf = buf.into();
        Ok(self.cast(buf.as_aligned()).to_owned())
    }
    /// Deserialize from an in-memory byte slice, copying if necessary to
    /// obtain correct alignment.
    #[allow(clippy::wrong_self_convention)]
    #[cfg(feature = "alloc")]
    fn from_bytes(&self, data: impl AsRef<[u8]>) -> <Self::Type as ToOwned>::Owned {
        let cow = aligned_bytes::copy_to_align(data.as_ref());
        self.cast(cow.as_ref()).to_owned()
    }
    /// Serialize `data` in GVariant format to `out`, returning the number of
    /// bytes written.
    #[cfg(feature = "std")]
    fn serialize(
        &self,
        data: impl SerializeTo<Self::Type>,
        out: &mut impl Write,
    ) -> std::io::Result<usize> {
        data.serialize(out)
    }
    /// Convenience wrapper around [`Marker::serialize`] that writes to a new
    /// `Vec<u8>`.
    #[cfg(feature = "std")]
    fn serialize_to_vec(&self, data: impl SerializeTo<Self::Type>) -> Vec<u8> {
        let mut out = vec![];
        self.serialize(data, &mut out)
            .expect("Serialization to Vec should be infallible");
        out
    }
}
/// Implemented by Rust types that can be serialized as the GVariant type `T`
/// (e.g. `&str` implements `SerializeTo<Str>`).
pub trait SerializeTo<T: Cast + ?Sized> {
    /// Write `self` to `f` in GVariant wire format, returning the number of
    /// bytes written.
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize>;
}
/// An owned, aligned byte buffer interpreted as the GVariant type `T`.
///
/// Dereferences to `T`, analogous to how `String` derefs to `str`.
pub struct Owned<T: Cast + ?Sized> {
    // Backing storage; reinterpreted as `T` on each deref.
    data: buf::AlignedBuf,
    ty: PhantomData<T>,
}
impl<T: Cast + ?Sized> Debug for Owned<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deref().fmt(f)
}
}
impl<T: Cast + ?Sized> Display for Owned<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deref().fmt(f)
}
}
impl<T: Cast + ?Sized> Owned<T> {
pub fn from_bytes(s: &[u8]) -> Self {
Self {
data: s.to_owned().into(),
ty: PhantomData::<T> {},
}
}
pub fn from_vec(data: Vec<u8>) -> Self {
Self {
data: data.into(),
ty: PhantomData::<T> {},
}
}
}
impl<T: Cast + ?Sized> Borrow<T> for Owned<T> {
fn borrow(&self) -> &T {
&*self
}
}
impl<T: Cast + ?Sized> Deref for Owned<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
T::from_aligned_slice(self.data.as_aligned())
}
}
impl<T: ?Sized + Cast> From<Vec<u8>> for Owned<T> {
fn from(data: Vec<u8>) -> Self {
Self::from_vec(data)
}
}
/// Creates a [`Marker`] instance for the GVariant type string given as a
/// string literal, e.g. `gv!("a(is)")`.
///
/// Expands to a private module containing the generated Rust types for the
/// signature plus a `Marker` impl, and evaluates to an instance of that
/// marker.
#[macro_export]
macro_rules! gv {
    ($typestr:literal) => {{
        #[allow(unused_imports)]
        mod _m {
            // Bring everything the macro-generated code may reference into
            // scope:
            use $crate::aligned_bytes::{
                align_offset, empty_aligned, AlignedOffset, AlignedSlice, AsAligned, A1, A2, A4, A8,
            };
            use $crate::casting::{AlignOf, AllBitPatternsValid};
            use $crate::*;
            // Generates the Rust types corresponding to this signature:
            _define_gv!($typestr);
            #[derive(Copy, Clone)]
            pub(crate) struct Marker();
            impl $crate::Marker for Marker {
                type Type = _gv_type!($typestr);
                #[allow(clippy::string_lit_as_bytes)]
                const TYPESTR: &'static [u8] = $typestr.as_bytes();
            }
        }
        _m::Marker()
    }};
}
/// Write NUL padding bytes to `f` so that `offset` becomes aligned to `A`,
/// returning how many padding bytes were written (at most 7, since the
/// largest supported alignment is 8).
#[doc(hidden)]
pub fn write_padding<A: aligned_bytes::Alignment, W: Write>(
    offset: usize,
    f: &mut W,
) -> std::io::Result<usize> {
    let padding = align_offset::<A>(offset).to_usize() - offset;
    f.write_all(&[0u8; 7][..padding])?;
    Ok(padding)
}
/// A type that a serialized GVariant byte slice can be cast to in place.
///
/// Top-level casting is infallible: out-of-spec data yields a default value
/// rather than an error, following the GVariant spec's error-handling
/// strategy.
pub trait Cast:
    casting::AlignOf + casting::AllBitPatternsValid + 'static + PartialEq + Debug + ToOwned
{
    /// Cast `slice` to `&Self`, falling back to [`Cast::default_ref`] when
    /// the slice has the wrong size.
    fn from_aligned_slice(slice: &AlignedSlice<Self::AlignOf>) -> &Self {
        match Self::try_from_aligned_slice(slice) {
            Ok(x) => x,
            Err(_) => Self::default_ref(),
        }
    }
    /// The default value used for out-of-spec data.
    fn default_ref() -> &'static Self;
    /// Fallible in-place cast; fails only for fixed-size types given a
    /// wrong-sized slice.
    fn try_from_aligned_slice(
        slice: &AlignedSlice<Self::AlignOf>,
    ) -> Result<&Self, casting::WrongSize>;
    /// Mutable counterpart of [`Cast::try_from_aligned_slice`].
    fn try_from_aligned_slice_mut(
        slice: &mut AlignedSlice<Self::AlignOf>,
    ) -> Result<&mut Self, casting::WrongSize>;
}
// Implements `Cast` (in-place reinterpretation of the bytes) and the
// `SerializeTo` impls (native-endian byte output) for a fixed-size numeric
// type, with `$default` used when cast from a wrong-sized slice.
macro_rules! impl_cast_for {
    ($t:ty, $default:expr) => {
        impl Cast for $t {
            fn default_ref() -> &'static Self {
                &$default
            }
            fn try_from_aligned_slice(
                slice: &AlignedSlice<Self::AlignOf>,
            ) -> Result<&Self, casting::WrongSize> {
                casting::try_cast_slice_to::<Self>(slice)
            }
            fn try_from_aligned_slice_mut(
                slice: &mut AlignedSlice<Self::AlignOf>,
            ) -> Result<&mut Self, casting::WrongSize> {
                casting::try_cast_slice_to_mut::<Self>(slice)
            }
        }
        impl SerializeTo<$t> for $t {
            // Serialized form is the value's native-endian bytes.
            fn serialize(self, f: &mut impl std::io::Write) -> std::io::Result<usize> {
                f.write_all(self.to_ne_bytes().as_ref())?;
                Ok(std::mem::size_of::<$t>())
            }
        }
        impl SerializeTo<$t> for &$t {
            fn serialize(self, f: &mut impl std::io::Write) -> std::io::Result<usize> {
                (*self).serialize(f)
            }
        }
    };
}
// Fixed-size integer and float types: cast in place (native byte order) and
// default to zero when the slice size doesn't match.
impl_cast_for!(u8, 0);
impl_cast_for!(u16, 0);
impl_cast_for!(i16, 0);
impl_cast_for!(u32, 0);
impl_cast_for!(i32, 0);
impl_cast_for!(u64, 0);
impl_cast_for!(i64, 0);
impl_cast_for!(f64, 0.);
/// The GVariant string type (typestr `s`).
///
/// Wraps the raw serialized bytes: per the GVariant spec these are the
/// string contents followed by a single NUL terminator, but out-of-spec data
/// is representable and handled lazily by the accessors.
#[derive(RefCast, Eq)]
#[repr(transparent)]
pub struct Str {
    data: [u8],
}
impl Str {
    /// The raw contents without the trailing NUL terminator.
    ///
    /// "Non-conformant" because the result may still contain interior NULs
    /// or invalid UTF-8.  Data lacking the required NUL terminator entirely
    /// yields `b""`.
    pub fn as_bytes_non_conformant(&self) -> &[u8] {
        let bytes: &[u8] = self.data.as_ref();
        match bytes.split_last() {
            Some((&b'\0', contents)) => contents,
            _ => b"",
        }
    }
    /// The validated contents.
    ///
    /// Returns `""` for any non-conformant data: missing NUL terminator,
    /// interior NUL bytes, or invalid UTF-8.
    pub fn to_str(&self) -> &str {
        let contents = self.as_bytes_non_conformant();
        if memchr::memchr(b'\0', contents).is_some() {
            ""
        } else {
            core::str::from_utf8(contents).unwrap_or("")
        }
    }
}
// SAFETY: `Str` is a `repr(transparent)` wrapper over `[u8]`, and every bit
// pattern is a valid `[u8]` (malformed data is handled by the accessors).
unsafe impl AllBitPatternsValid for Str {}
// SAFETY: GVariant strings have 1-byte alignment.
unsafe impl AlignOf for Str {
    type AlignOf = aligned_bytes::A1;
}
impl Cast for Str {
    /// The default value is the empty string.
    fn default_ref() -> &'static Self {
        // `Str` derives `RefCast` over its `[u8]` field, so use the safe
        // conversion instead of the previous raw-pointer `unsafe` cast.
        Self::ref_cast(&[])
    }
    /// Always succeeds: any byte sequence is a valid `Str` (non-conformant
    /// data is handled lazily by the accessors).
    fn try_from_aligned_slice(
        slice: &AlignedSlice<Self::AlignOf>,
    ) -> Result<&Self, casting::WrongSize> {
        Ok(Self::ref_cast(slice.as_ref()))
    }
    fn try_from_aligned_slice_mut(
        slice: &mut AlignedSlice<Self::AlignOf>,
    ) -> Result<&mut Self, casting::WrongSize> {
        Ok(Self::ref_cast_mut(slice.as_mut()))
    }
}
/// Serialize a `Str`: its validated contents plus the NUL terminator.
impl SerializeTo<Str> for &Str {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let contents = self.to_str().as_bytes();
        f.write_all(contents)?;
        f.write_all(b"\0")?;
        Ok(contents.len() + 1)
    }
}
/// Serialize a Rust `&str`; fails if it contains interior NUL bytes, which
/// are unrepresentable in a GVariant string.
impl SerializeTo<Str> for &str {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let contents = self.as_bytes();
        if memchr::memchr(b'\0', contents).is_some() {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Strings may not contain NULs",
            ));
        }
        f.write_all(contents)?;
        f.write_all(b"\0")?;
        Ok(contents.len() + 1)
    }
}
/// Blanket impl so that `Copy` serializers also work behind a reference.
impl<T: SerializeTo<Str> + Copy> SerializeTo<Str> for &T {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let inner = *self;
        inner.serialize(f)
    }
}
impl SerializeTo<Str> for &Box<Str> {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        (**self).to_str().serialize(f)
    }
}
impl SerializeTo<Str> for &String {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let s: &str = self.as_str();
        s.serialize(f)
    }
}
impl PartialEq for Str {
    /// Two `Str`s are equal if their raw contents match, or if their
    /// validated (`to_str`) forms match.  The second clause means any two
    /// non-conformant strings compare equal (both validate to `""`).
    fn eq(&self, other: &Self) -> bool {
        self.as_bytes_non_conformant() == other.as_bytes_non_conformant()
            || self.to_str() == other.to_str()
    }
}
// Comparisons against `str` go through the validated form:
impl PartialEq<Str> for str {
    fn eq(&self, other: &Str) -> bool {
        self == other.to_str()
    }
}
impl PartialEq<str> for Str {
    fn eq(&self, other: &str) -> bool {
        self.to_str() == other
    }
}
impl Display for Str {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Display::fmt(self.to_str(), f)
}
}
impl Debug for Str {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Debug::fmt(self.to_str(), f)
}
}
impl<'a> From<&'a Str> for &'a str {
    /// Borrow the validated contents.
    fn from(x: &'a Str) -> Self {
        x.to_str()
    }
}
#[cfg(feature = "alloc")]
impl From<&Str> for String {
    /// Copy the validated contents into an owned `String`.
    fn from(x: &Str) -> Self {
        String::from(x.to_str())
    }
}
#[cfg(feature = "alloc")]
impl ToOwned for Str {
    type Owned = GString;
    fn to_owned(&self) -> Self::Owned {
        // `to_str` already rejects interior NULs, so the unchecked
        // constructor is sound here.
        GString::from_str_unchecked(self.to_str())
    }
}
/// Helper that formats possibly-non-UTF-8 bytes as text.
///
/// With `alloc`, invalid sequences are replaced via `from_utf8_lossy`;
/// without `alloc`, invalid input renders as a fixed placeholder because
/// lossy conversion would need to allocate.
struct DisplayUtf8Lossy<'a>(&'a [u8]);
impl core::fmt::Display for DisplayUtf8Lossy<'_> {
    #[cfg(feature = "alloc")]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Display::fmt(&String::from_utf8_lossy(self.0).as_ref(), f)
    }
    #[cfg(not(feature = "alloc"))]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Display::fmt(
            match core::str::from_utf8(self.0) {
                Ok(x) => x,
                Err(_) => "<Error: Invalid Utf-8>",
            },
            f,
        )
    }
}
impl core::fmt::Debug for DisplayUtf8Lossy<'_> {
    #[cfg(feature = "alloc")]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(&String::from_utf8_lossy(self.0).as_ref(), f)
    }
    #[cfg(not(feature = "alloc"))]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match core::str::from_utf8(self.0) {
            Ok(x) => core::fmt::Debug::fmt(x, f),
            // Placeholder is Display-formatted so it isn't quoted/escaped:
            Err(_) => core::fmt::Display::fmt("<Error: Invalid Utf-8>", f),
        }
    }
}
#[cfg(feature = "alloc")]
mod gstring {
    use super::*;
    use core::{borrow::Borrow, convert::TryFrom, ops::Deref};
    #[cfg(feature = "std")]
    use std::ffi::CStr;
    /// An owned GVariant string.
    ///
    /// Invariant: `data` always ends with exactly one NUL terminator and
    /// contains no other NUL bytes, so the buffer is simultaneously valid
    /// UTF-8 and a conformant serialized GVariant string.
    #[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
    pub struct GString {
        data: String,
    }
    impl Default for GString {
        fn default() -> Self {
            Self::new()
        }
    }
    impl GString {
        /// The string contents, without the trailing NUL terminator.
        pub fn as_str(&self) -> &str {
            self.as_ref()
        }
        /// Capacity available for contents; one byte of the underlying
        /// buffer is always reserved for the NUL terminator.
        pub fn capacity(&self) -> usize {
            self.data.capacity() - 1
        }
        /// Reserve space for at least `additional` more bytes of contents.
        pub fn reserve(&mut self, additional: usize) {
            self.data.reserve(additional)
        }
        /// Truncate to the empty string, keeping the allocation.
        pub fn clear(&mut self) {
            self.data.clear();
            self.data.push('\0');
        }
        /// Length of the contents in bytes, excluding the NUL terminator.
        pub fn len(&self) -> usize {
            self.data.len() - 1
        }
        pub fn is_empty(&self) -> bool {
            self.len() == 0
        }
        /// An empty `GString` (just the NUL terminator).
        pub fn new() -> Self {
            Self {
                data: "\0".to_owned(),
            }
        }
        /// An empty `GString` with room for `capacity` bytes of contents
        /// before reallocating.
        pub fn with_capacity(capacity: usize) -> Self {
            // Bug fix: the reserved buffer used to be discarded in favour of
            // a fresh 1-byte string, so the requested capacity was silently
            // ignored.
            let mut data = String::with_capacity(capacity + 1);
            data.push('\0');
            Self { data }
        }
        /// Append `s`.  Fails, leaving `self` unchanged, if `s` contains NUL
        /// bytes (unrepresentable in a GVariant string).
        pub fn try_push_str(&mut self, s: &str) -> Result<(), ContainsNulBytesError> {
            if memchr::memchr(b'\0', s.as_bytes()).is_some() {
                Err(ContainsNulBytesError())
            } else {
                self.data.reserve(s.len());
                // Drop the terminator, append, then restore the terminator:
                self.data.pop();
                self.data += s;
                self.data.push('\0');
                Ok(())
            }
        }
        /// Construct from a `&str` the caller guarantees is NUL-free.
        pub(crate) fn from_str_unchecked(s: &str) -> Self {
            let mut data = String::with_capacity(s.len() + 1);
            data.push_str(s);
            data.push('\0');
            Self { data }
        }
    }
    impl Display for GString {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            let s: &str = self.as_ref();
            Display::fmt(s, f)
        }
    }
    impl Debug for GString {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            let s: &str = self.as_ref();
            Debug::fmt(s, f)
        }
    }
    impl Borrow<Str> for GString {
        fn borrow(&self) -> &Str {
            self.deref()
        }
    }
    impl Deref for GString {
        type Target = Str;
        fn deref(&self) -> &Self::Target {
            // Infallible: the buffer (including its NUL terminator) is a
            // conformant serialized string, and `Str` casts never fail.
            Str::try_from_aligned_slice(self.data.as_bytes().as_aligned()).unwrap()
        }
    }
    impl core::fmt::Write for GString {
        fn write_str(&mut self, s: &str) -> core::fmt::Result {
            // NUL bytes in `s` surface as a generic `fmt::Error`.
            self.try_push_str(s).map_err(|_| core::fmt::Error)
        }
    }
    /// Error returned when input text contains NUL bytes, which a GVariant
    /// string cannot represent.
    pub struct ContainsNulBytesError();
    impl TryFrom<String> for GString {
        type Error = ContainsNulBytesError;
        /// Converts in place (no copy), appending the NUL terminator.
        fn try_from(mut value: String) -> Result<Self, Self::Error> {
            if memchr::memchr(b'\0', value.as_bytes()).is_some() {
                Err(ContainsNulBytesError())
            } else {
                value.push('\0');
                Ok(GString { data: value })
            }
        }
    }
    impl From<GString> for String {
        /// Converts in place, dropping the NUL terminator.
        fn from(mut s: GString) -> Self {
            s.data.pop();
            s.data
        }
    }
    #[cfg(feature = "std")]
    impl From<GString> for std::ffi::CString {
        fn from(s: GString) -> Self {
            // Infallible: the invariant guarantees exactly one NUL, at the
            // end of the buffer.
            Self::from_vec_with_nul(s.data.into()).unwrap()
        }
    }
    #[cfg(feature = "std")]
    impl AsRef<CStr> for GString {
        fn as_ref(&self) -> &CStr {
            CStr::from_bytes_with_nul(self.data.as_bytes()).unwrap()
        }
    }
    impl AsRef<str> for GString {
        fn as_ref(&self) -> &str {
            // Strip the trailing NUL terminator.
            &self.data[..self.data.len() - 1]
        }
    }
}
#[cfg(feature = "alloc")]
pub use gstring::{ContainsNulBytesError, GString};
/// The GVariant variant type (typestr `v`): a dynamically typed value whose
/// wire format is the serialized data, a NUL byte, then the type string.
#[derive(RefCast)]
#[repr(transparent)]
pub struct Variant(AlignedSlice<A8>);
// SAFETY: variants use 8-byte alignment, the maximum of any contained type.
unsafe impl AlignOf for Variant {
    type AlignOf = A8;
}
// SAFETY: `repr(transparent)` wrapper over a byte slice; all bit patterns
// are valid (malformed data is handled by the accessors).
unsafe impl AllBitPatternsValid for Variant {}
impl Cast for Variant {
fn default_ref() -> &'static Self {
Self::ref_cast(empty_aligned())
}
fn try_from_aligned_slice(
slice: &AlignedSlice<Self::AlignOf>,
) -> Result<&Self, casting::WrongSize> {
Ok(Self::ref_cast(slice))
}
fn try_from_aligned_slice_mut(
slice: &mut AlignedSlice<Self::AlignOf>,
) -> Result<&mut Self, casting::WrongSize> {
Ok(Self::ref_cast_mut(slice))
}
}
impl SerializeTo<Variant> for &Variant {
    /// Re-emit the wire format: data, NUL separator, then the type string.
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let (ty, payload) = self.split();
        f.write_all(payload)?;
        f.write_all(b"\0")?;
        f.write_all(ty)?;
        Ok(payload.len() + 1 + ty.len())
    }
}
impl Debug for Variant {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let (ty, payload) = self.split();
        // Type strings are usually ASCII, so render them lossily as text;
        // the payload is shown as raw bytes.
        let bytes: &[u8] = payload.as_ref();
        write!(
            f,
            "Variant {{ type: {:?}, data: {:?} }}",
            DisplayUtf8Lossy(ty),
            bytes
        )
    }
}
#[cfg(feature = "alloc")]
impl ToOwned for Variant {
    type Owned = Owned<Self>;
    /// Copy the raw serialized bytes into an owned aligned buffer.
    fn to_owned(&self) -> Self::Owned {
        Owned::from_bytes(self.0.as_ref())
    }
}
impl Variant {
    /// Extract the contained value if the variant's type string matches
    /// `m`'s `TYPESTR`; returns `None` on a type mismatch.
    pub fn get<M: Marker>(&self, m: M) -> Option<&M::Type>
    where
        AlignedSlice<A8>: AsAligned<<M::Type as AlignOf>::AlignOf>,
    {
        let (typestr, data) = self.split();
        if typestr == M::TYPESTR {
            Some(m.cast(data.as_aligned()))
        } else {
            None
        }
    }
    /// Split the serialized variant into `(typestr, data)`.
    ///
    /// The wire format is data, a NUL byte, then the type string, so we
    /// split at the *last* NUL.  Malformed input containing no NUL at all
    /// yields the unit typestr `()` with empty data, following GVariant's
    /// default-value error handling.
    pub fn split(&self) -> (&[u8], &AlignedSlice<A8>) {
        let split_pos = memchr::memrchr(b'\0', &self.0);
        if let Some(mid) = split_pos {
            let (data, ty) = self.0.split_at(mid);
            // `ty[0]` is the NUL separator itself; skip it.
            (&ty[1..], data)
        } else {
            (b"()", empty_aligned())
        }
    }
}
impl PartialEq for Variant {
    /// Variants are equal when both their type strings and payloads match.
    fn eq(&self, other: &Self) -> bool {
        let (ty_a, data_a) = self.split();
        let (ty_b, data_b) = other.split();
        ty_a == ty_b && data_a == data_b
    }
}
/// Pairs a value with a [`Marker`] so it can be serialized as a variant
/// (`v`): the value's serialized form, a NUL, then the type string.
#[derive(Debug, Copy, Clone)]
pub struct VariantWrap<M: Marker, T: SerializeTo<M::Type>>(pub M, pub T);
impl<M: Marker, T: SerializeTo<M::Type>> SerializeTo<Variant> for VariantWrap<M, T> {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let payload_len = self.0.serialize(self.1, f)?;
        f.write_all(b"\0")?;
        f.write_all(M::TYPESTR)?;
        Ok(payload_len + 1 + M::TYPESTR.len())
    }
}
/// Arrays of fixed-size elements cast directly to Rust slices.
///
/// (The previous impl declared an unused lifetime parameter `'a`; removed.)
impl<T: Cast + 'static + Copy> Cast for [T] {
    /// The default is the empty slice.
    fn default_ref() -> &'static Self {
        &[]
    }
    fn try_from_aligned_slice(
        slice: &AlignedSlice<Self::AlignOf>,
    ) -> Result<&Self, casting::WrongSize> {
        casting::cast_slice::<Self::AlignOf, T>(slice)
    }
    /// Not yet implemented for slices.
    fn try_from_aligned_slice_mut(
        _: &mut AlignedSlice<Self::AlignOf>,
    ) -> Result<&mut Self, casting::WrongSize> {
        todo!()
    }
}
/// Serialize any iterator as a GVariant array of fixed-size elements: the
/// elements' serialized forms are simply concatenated.
impl<GvT: Cast + Copy, It: IntoIterator> SerializeTo<[GvT]> for It
where
    It::Item: SerializeTo<GvT>,
{
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        self.into_iter()
            .try_fold(0usize, |written, item| Ok(written + item.serialize(f)?))
    }
}
/// Width in bytes of the framing offsets used by a serialized container.
#[doc(hidden)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum OffsetSize {
    U0 = 0,
    U1 = 1,
    U2 = 2,
    U4 = 4,
    U8 = 8,
}
/// Framing-offset width for a container of serialized size `len`: the
/// smallest unsigned integer width that can represent `len`.
#[doc(hidden)]
pub fn offset_size(len: usize) -> OffsetSize {
    if len == 0 {
        OffsetSize::U0
    } else if len <= 0xFF {
        OffsetSize::U1
    } else if len <= 0xFFFF {
        OffsetSize::U2
    } else if len <= 0xFFFF_FFFF {
        OffsetSize::U4
    } else {
        OffsetSize::U8
    }
}
/// Read the `n`th little-endian unsigned integer of width `size` from
/// `data`.  Panics (like slice indexing) if `data` is too short.
#[doc(hidden)]
pub fn read_uint(data: &[u8], size: OffsetSize, n: usize) -> usize {
    let start = n * size as usize;
    match size {
        OffsetSize::U0 => 0,
        OffsetSize::U1 => data[start] as usize,
        OffsetSize::U2 => u16::from_le_bytes(data[start..start + 2].try_into().unwrap()) as usize,
        OffsetSize::U4 => u32::from_le_bytes(data[start..start + 4].try_into().unwrap()) as usize,
        OffsetSize::U8 => u64::from_le_bytes(data[start..start + 8].try_into().unwrap()) as usize,
    }
}
/// Read and validate the last framing offset of a serialized
/// non-fixed-size container, returning `(offset_width, last_frame_offset)`.
///
/// The last frame offset occupies the final `osz` bytes of `data` and points
/// at the start of the framing-offset array.  Out-of-spec values (an offset
/// past the end, or an offset area whose size isn't a multiple of the offset
/// width) are normalized to an empty offset array (`lfo == data.len()`),
/// following GVariant's default-value error handling.
fn read_last_frame_offset(data: &[u8]) -> (OffsetSize, usize) {
    let osz = offset_size(data.len());
    if osz == OffsetSize::U0 {
        // Empty container: report width 1 so callers dividing by the width
        // don't divide by zero.
        (OffsetSize::U1, 0)
    } else {
        let last = read_uint(&data[data.len() - osz as usize..], osz, 0);
        if last > data.len() {
            // Offset points past the end of the data: treat as no elements.
            return (osz, data.len());
        }
        let size = data.len() - last;
        if size % osz as usize == 0 {
            (osz, last)
        } else {
            // Offset area isn't a whole number of offsets: out-of-spec.
            (osz, data.len())
        }
    }
}
/// The GVariant array type (`aX`) for non-fixed-size elements.
///
/// Wire format: the elements' serialized data (each aligned), followed by an
/// array of framing offsets recording each element's end position.
#[derive(RefCast)]
#[repr(transparent)]
pub struct NonFixedWidthArray<T: Cast + ?Sized> {
    data: AlignedSlice<T::AlignOf>,
}
impl<T: Cast + Debug + ?Sized> Debug for NonFixedWidthArray<T> {
    /// Note: prints a trailing ", " after every element, including the last,
    /// e.g. `[1, 2, ]`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "[")?;
        for elem in self.iter() {
            write!(f, "{:?}, ", elem)?;
        }
        write!(f, "]")
    }
}
#[cfg(feature = "alloc")]
impl<T: Cast + ?Sized> ToOwned for NonFixedWidthArray<T> {
    type Owned = Owned<Self>;
    /// Copy the raw serialized bytes into an owned aligned buffer.
    fn to_owned(&self) -> Self::Owned {
        Owned::from_bytes(self.data.as_ref())
    }
}
// SAFETY: the array's alignment requirement is exactly its element type's.
unsafe impl<T: Cast + ?Sized> AlignOf for NonFixedWidthArray<T> {
    type AlignOf = T::AlignOf;
}
// SAFETY: `repr(transparent)` wrapper over a byte slice; any bit pattern is
// valid (malformed framing is handled by the accessors).
unsafe impl<T: Cast + ?Sized> AllBitPatternsValid for NonFixedWidthArray<T> {}
impl<T: Cast + ?Sized> Cast for NonFixedWidthArray<T> {
fn default_ref() -> &'static Self {
Self::ref_cast(empty_aligned())
}
fn try_from_aligned_slice(
slice: &AlignedSlice<Self::AlignOf>,
) -> Result<&Self, casting::WrongSize> {
Ok(Self::ref_cast(slice))
}
fn try_from_aligned_slice_mut(
slice: &mut AlignedSlice<Self::AlignOf>,
) -> Result<&mut Self, casting::WrongSize> {
Ok(Self::ref_cast_mut(slice))
}
}
impl<T: Cast + ?Sized> NonFixedWidthArray<T> {
    /// Number of elements, implied by the size of the framing-offset array
    /// (one offset per element).
    pub fn len(&self) -> usize {
        let (osz, lfo) = read_last_frame_offset(&self.data);
        (self.data.len() - lfo) / osz as usize
    }
    /// `true` if the array has no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Iterate over the elements.
    pub fn iter(&self) -> NonFixedWidthArrayIterator<T> {
        self.into_iter()
    }
    /// First element, or `None` when empty.
    pub fn first(&self) -> Option<&T> {
        match self.is_empty() {
            true => None,
            false => Some(&self[0]),
        }
    }
    /// Last element, or `None` when empty.
    pub fn last(&self) -> Option<&T> {
        match self.len() {
            0 => None,
            n => Some(&self[n - 1]),
        }
    }
}
impl<T: Cast + PartialEq + ?Sized> PartialEq for NonFixedWidthArray<T> {
    /// Element-wise comparison.
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
    }
}
impl<T: Cast + PartialEq + Eq + ?Sized> Eq for NonFixedWidthArray<T> {}
/// Iterator over the elements of a [`NonFixedWidthArray`].
pub struct NonFixedWidthArrayIterator<'a, Item: Cast + ?Sized> {
    // Element data (everything before the framing-offset array).
    data: &'a AlignedSlice<Item::AlignOf>,
    // The framing-offset array that trails the element data.
    offsets: &'a [u8],
    // Byte position in `data` where the next element starts (pre-alignment).
    next_start: usize,
    // Byte position of the next offset to read within `offsets`.
    offset_idx: usize,
    offset_size: OffsetSize,
}
/// Element-wise comparison against a slice of references.
impl<Item: Cast + ?Sized + PartialEq<T>, T: ?Sized> PartialEq<[&T]> for NonFixedWidthArray<Item> {
    fn eq(&self, other: &[&T]) -> bool {
        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == *b)
    }
}
impl<Item: Cast + ?Sized + PartialEq<T>, T: ?Sized> PartialEq<NonFixedWidthArray<Item>> for [&T] {
    fn eq(&self, other: &NonFixedWidthArray<Item>) -> bool {
        other == self
    }
}
impl<'a, Item: Cast + 'static + ?Sized> Iterator for NonFixedWidthArrayIterator<'a, Item> {
    type Item = &'a Item;
    fn next(&mut self) -> Option<Self::Item> {
        if self.offset_idx >= self.offsets.len() {
            None
        } else {
            // The element runs from the aligned end of the previous element
            // to the position given by the next framing offset.
            let start = align_offset::<Item::AlignOf>(self.next_start);
            let end = read_uint(&self.offsets[self.offset_idx..], self.offset_size, 0);
            self.offset_idx += self.offset_size as usize;
            self.next_start = end;
            if end < start || end > self.data.len() {
                // Out-of-spec framing: yield the element type's default
                // (cast of an empty slice) rather than erroring or panicking.
                Some(Item::try_from_aligned_slice(aligned_bytes::empty_aligned()).unwrap())
            } else {
                Some(Item::try_from_aligned_slice(&self.data[..end][start..]).unwrap())
            }
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: one element per remaining framing offset.
        let l = match self.offset_size {
            OffsetSize::U0 => 0,
            _ => (self.offsets.len() - self.offset_idx) / self.offset_size as usize,
        };
        (l, Some(l))
    }
}
// `size_hint` is exact (see above), so the iterator is exact-size.
impl<'a, Item: Cast + ?Sized> ExactSizeIterator for NonFixedWidthArrayIterator<'a, Item> {}
impl<'a, Item: Cast + 'static + ?Sized> IntoIterator for &'a NonFixedWidthArray<Item> {
    type Item = &'a Item;
    type IntoIter = NonFixedWidthArrayIterator<'a, Item>;
    fn into_iter(self) -> Self::IntoIter {
        // Split the serialized form into element data and the trailing
        // framing-offset array.
        let (offset_size, last_frame_offset) = read_last_frame_offset(&self.data);
        let (element_data, frame_offsets) = self.data.split_at(last_frame_offset);
        NonFixedWidthArrayIterator {
            data: element_data,
            offsets: frame_offsets,
            next_start: 0,
            offset_idx: 0,
            offset_size,
        }
    }
}
impl<Item: Cast + 'static + ?Sized> core::ops::Index<usize> for NonFixedWidthArray<Item> {
    type Output = Item;
    /// Random access by element index.
    ///
    /// Element `index` spans from the (aligned) end of element `index - 1`
    /// to the position given by frame offset `index`.  Out-of-spec framing
    /// yields the element type's default value; an `index` past the end of
    /// the frame-offset array panics in `read_uint`'s slice access.
    fn index(&self, index: usize) -> &Self::Output {
        let (osz, lfo) = read_last_frame_offset(&self.data);
        let frame_offsets = &self.data.as_ref()[lfo..];
        let end = read_uint(frame_offsets, osz, index);
        let start = align_offset::<Item::AlignOf>(match index {
            0 => 0,
            x => read_uint(frame_offsets, osz, x - 1),
        });
        if start < self.data.len() && end <= lfo && start <= end {
            // Non-fixed-size casts never fail, so unwrap is safe here.
            Item::try_from_aligned_slice(&self.data[..end][start..]).unwrap()
        } else {
            // Out-of-spec framing: fall back to the empty-slice cast.
            Item::try_from_aligned_slice(aligned_bytes::empty_aligned()).unwrap()
        }
    }
}
/// Serialize an iterator as a GVariant array of non-fixed-size elements:
/// each element is written followed by its alignment padding, then the
/// framing offsets (each element's end position, pre-padding) are appended.
impl<GvT: Cast + ?Sized, It: IntoIterator> SerializeTo<NonFixedWidthArray<GvT>> for It
where
    It::Item: SerializeTo<GvT>,
{
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        let mut bytes_written = 0;
        let mut offsets = vec![];
        for x in self.into_iter() {
            bytes_written += x.serialize(f)?;
            // Record the end offset *before* padding: that's what the frame
            // offset must point at.
            offsets.push(bytes_written);
            let padding = align_offset::<GvT::AlignOf>(bytes_written).to_usize() - bytes_written;
            // Maximum alignment is 8, so at most 7 padding bytes are needed.
            f.write_all(&b"\0\0\0\0\0\0\0"[..padding])?;
            bytes_written += padding;
        }
        write_offsets(bytes_written, offsets.as_ref(), f)
    }
}
/// Append the framing offsets of a serialized container to `f`, choosing the
/// smallest offset width such that the final total size (data plus offsets)
/// still fits that width.  Returns the total number of bytes in the
/// container (`bytes_written` plus the offsets just written).
#[doc(hidden)]
pub fn write_offsets(
    mut bytes_written: usize,
    offsets: &[usize],
    f: &mut impl Write,
) -> std::io::Result<usize> {
    // Width selection must account for the offsets themselves, since they
    // contribute to the container's total size.
    let width = if bytes_written + offsets.len() <= 0xff {
        1
    } else if bytes_written + offsets.len() * 2 <= 0xffff {
        2
    } else if bytes_written + offsets.len() * 4 <= 0xffff_ffff {
        4
    } else {
        8
    };
    bytes_written += offsets.len() * width;
    for &offset in offsets {
        match width {
            1 => f.write_all(&(offset as u8).to_le_bytes())?,
            2 => f.write_all(&(offset as u16).to_le_bytes())?,
            4 => f.write_all(&(offset as u32).to_le_bytes())?,
            _ => f.write_all(&(offset as u64).to_le_bytes())?,
        }
    }
    Ok(bytes_written)
}
/// The GVariant maybe type (`mX`) for fixed-size contained types.
///
/// Wire format: empty data means `None`; data of exactly the contained
/// type's size means `Some`.
#[repr(transparent)]
#[derive(RefCast)]
pub struct MaybeFixedSize<T: Cast> {
    marker: PhantomData<T>,
    data: AlignedSlice<T::AlignOf>,
}
#[cfg(feature = "alloc")]
impl<T: Cast> ToOwned for MaybeFixedSize<T> {
type Owned = Owned<Self>;
fn to_owned(&self) -> Self::Owned {
Owned::from_bytes(&*self.data)
}
}
impl<T: Cast + Debug> Debug for MaybeFixedSize<T> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.to_option().fmt(f)
}
}
impl<T: Cast> MaybeFixedSize<T> {
    /// `Some(&T)` when the data is exactly the right size for `T`,
    /// otherwise `None` (including the canonical empty encoding).
    pub fn to_option(&self) -> Option<&T> {
        match T::try_from_aligned_slice(&self.data) {
            Ok(v) => Some(v),
            Err(_) => None,
        }
    }
}
impl<'a, T: Cast> From<&'a MaybeFixedSize<T>> for Option<&'a T> {
    fn from(m: &'a MaybeFixedSize<T>) -> Self {
        m.to_option()
    }
}
/// Equality matches that of the corresponding `Option<&T>` values.
impl<T: Cast + PartialEq> PartialEq for MaybeFixedSize<T> {
    fn eq(&self, other: &Self) -> bool {
        Option::eq(&self.to_option(), &other.to_option())
    }
}
impl<T: Cast + Eq> Eq for MaybeFixedSize<T> {}
impl<T: Cast + PartialEq> PartialEq<Option<&T>> for &MaybeFixedSize<T> {
    fn eq(&self, other: &Option<&T>) -> bool {
        &self.to_option() == other
    }
}
impl<T: Cast + PartialEq> PartialEq<&MaybeFixedSize<T>> for Option<&T> {
    fn eq(&self, other: &&MaybeFixedSize<T>) -> bool {
        other == self
    }
}
impl<T: Cast + PartialOrd> PartialOrd for MaybeFixedSize<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.to_option().partial_cmp(&other.to_option())
}
}
impl<T: Cast + Hash> Hash for MaybeFixedSize<T> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.to_option().hash(state)
}
}
// SAFETY: a maybe's alignment requirement is exactly its contained type's.
unsafe impl<T: Cast> AlignOf for MaybeFixedSize<T> {
    type AlignOf = T::AlignOf;
}
// SAFETY: `repr(transparent)` over a byte slice (plus zero-sized
// `PhantomData`); any bit pattern is valid.
unsafe impl<T: Cast> AllBitPatternsValid for MaybeFixedSize<T> {}
impl<T: Cast> Cast for MaybeFixedSize<T> {
fn default_ref() -> &'static Self {
Self::ref_cast(empty_aligned())
}
fn try_from_aligned_slice(
slice: &AlignedSlice<Self::AlignOf>,
) -> Result<&Self, casting::WrongSize> {
Ok(Self::ref_cast(slice))
}
fn try_from_aligned_slice_mut(
slice: &mut AlignedSlice<Self::AlignOf>,
) -> Result<&mut Self, casting::WrongSize> {
Ok(Self::ref_cast_mut(slice))
}
}
impl<'a, T: Cast> IntoIterator for &'a MaybeFixedSize<T> {
    type Item = &'a T;
    type IntoIter = core::option::IntoIter<&'a T>;
    /// Iterate over zero or one contained values.
    fn into_iter(self) -> Self::IntoIter {
        IntoIterator::into_iter(self.to_option())
    }
}
impl<'a, GvT: Cast + SerializeTo<GvT>> SerializeTo<MaybeFixedSize<GvT>> for &'a MaybeFixedSize<GvT>
where
    &'a GvT: SerializeTo<GvT>,
{
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        // Delegates to the `&Option<T>` impl below.
        SerializeTo::<MaybeFixedSize<GvT>>::serialize(&self.to_option(), f)
    }
}
/// Maybe wire format for fixed-size contained types: `Some` serializes the
/// value as-is, `None` serializes to zero bytes.
impl<GvT: Cast, T: SerializeTo<GvT> + Copy> SerializeTo<MaybeFixedSize<GvT>> for &Option<T>
where
    T: SerializeTo<GvT>,
{
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        if let Some(x) = self {
            x.serialize(f)
        } else {
            Ok(0)
        }
    }
}
/// The GVariant maybe type (`mX`) for non-fixed-size contained types.
///
/// Wire format: empty data means `None`; otherwise the value's serialized
/// data followed by a single trailing marker byte.
#[derive(RefCast)]
#[repr(transparent)]
pub struct MaybeNonFixedSize<T: Cast + ?Sized> {
    marker: PhantomData<T>,
    data: AlignedSlice<T::AlignOf>,
}
impl<T: Cast + Debug + ?Sized> Debug for MaybeNonFixedSize<T> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.to_option().fmt(f)
}
}
#[cfg(feature = "alloc")]
impl<T: Cast + ?Sized> ToOwned for MaybeNonFixedSize<T> {
type Owned = Owned<Self>;
fn to_owned(&self) -> Self::Owned {
Owned::from_bytes(&*self.data)
}
}
impl<T: Cast + ?Sized> MaybeNonFixedSize<T> {
    /// `None` when the serialized data is empty; otherwise `Some` of the
    /// data with its single trailing marker byte stripped.
    pub fn to_option(&self) -> Option<&T> {
        if self.data.is_empty() {
            None
        } else {
            // Non-fixed-size casts never fail, so unwrap is safe here.
            Some(T::try_from_aligned_slice(&self.data[..self.data.len() - 1]).unwrap())
        }
    }
}
// SAFETY: a maybe's alignment requirement is exactly its contained type's.
unsafe impl<T: Cast + ?Sized> AlignOf for MaybeNonFixedSize<T> {
    type AlignOf = T::AlignOf;
}
// SAFETY: `repr(transparent)` over a byte slice (plus zero-sized
// `PhantomData`); any bit pattern is valid.
unsafe impl<T: Cast + ?Sized> AllBitPatternsValid for MaybeNonFixedSize<T> {}
impl<T: Cast + ?Sized> Cast for MaybeNonFixedSize<T> {
fn default_ref() -> &'static Self {
Self::ref_cast(empty_aligned())
}
fn try_from_aligned_slice(
slice: &AlignedSlice<Self::AlignOf>,
) -> Result<&Self, casting::WrongSize> {
Ok(Self::ref_cast(slice))
}
fn try_from_aligned_slice_mut(
slice: &mut AlignedSlice<Self::AlignOf>,
) -> Result<&mut Self, casting::WrongSize> {
Ok(Self::ref_cast_mut(slice))
}
}
impl<'a, T: Cast + ?Sized> IntoIterator for &'a MaybeNonFixedSize<T> {
    type Item = &'a T;
    type IntoIter = core::option::IntoIter<&'a T>;
    /// Iterate over zero or one contained values.
    fn into_iter(self) -> Self::IntoIter {
        IntoIterator::into_iter(self.to_option())
    }
}
impl<'a, T: Cast + ?Sized> From<&'a MaybeNonFixedSize<T>> for Option<&'a T> {
    fn from(m: &'a MaybeNonFixedSize<T>) -> Self {
        m.to_option()
    }
}
/// Equality matches that of the corresponding `Option<&T>` values.
impl<T: Cast + PartialEq + ?Sized> PartialEq for MaybeNonFixedSize<T> {
    fn eq(&self, other: &Self) -> bool {
        Option::eq(&self.to_option(), &other.to_option())
    }
}
impl<T: Cast + Eq + ?Sized> Eq for MaybeNonFixedSize<T> {}
impl<T: Cast + PartialEq> PartialEq<Option<&T>> for MaybeNonFixedSize<T> {
    fn eq(&self, other: &Option<&T>) -> bool {
        &self.to_option() == other
    }
}
impl<T: Cast + PartialEq> PartialEq<MaybeNonFixedSize<T>> for Option<&T> {
    fn eq(&self, other: &MaybeNonFixedSize<T>) -> bool {
        other == self
    }
}
/// Maybe wire format for non-fixed-size contained types: `Some(x)` is `x`'s
/// serialization plus a trailing zero byte; `None` is zero bytes.
///
/// Any `IntoIterator` works as the source, mirroring `Option`'s iterator
/// behaviour; only the first item (if any) is serialized.
impl<GvT: Cast + ?Sized, T: IntoIterator> SerializeTo<MaybeNonFixedSize<GvT>> for T
where
    T::Item: SerializeTo<GvT>,
{
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        if let Some(x) = self.into_iter().next() {
            let len = x.serialize(f)?;
            // Trailing marker byte distinguishes `Some("")` from `None`:
            f.write_all(b"\0")?;
            Ok(len + 1)
        } else {
            Ok(0)
        }
    }
}
/// The GVariant boolean type (`b`): a single byte where zero means `false`.
#[derive(RefCast, Eq, Copy, Clone)]
#[repr(transparent)]
pub struct Bool(u8);
impl Bool {
    /// Any non-zero byte is interpreted as `true`.
    pub fn to_bool(self) -> bool {
        self.0 != 0
    }
}
impl Cast for Bool {
    /// Defaults to `false`.
    fn default_ref() -> &'static Self {
        static FALSE: Bool = Bool(0);
        &FALSE
    }
    fn try_from_aligned_slice(
        slice: &AlignedSlice<Self::AlignOf>,
    ) -> Result<&Self, casting::WrongSize> {
        casting::try_cast_slice_to::<Bool>(slice)
    }
    fn try_from_aligned_slice_mut(
        slice: &mut AlignedSlice<Self::AlignOf>,
    ) -> Result<&mut Self, casting::WrongSize> {
        casting::try_cast_slice_to_mut::<Bool>(slice)
    }
}
impl Debug for Bool {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Debug::fmt(&self.to_bool(), f)
}
}
// SAFETY: `repr(transparent)` over `u8`; every bit pattern is a valid `u8`
// (non-canonical values are interpreted as `true` by `to_bool`).
unsafe impl AllBitPatternsValid for Bool {}
// SAFETY: booleans have 1-byte alignment.
unsafe impl AlignOf for Bool {
    type AlignOf = aligned_bytes::A1;
}
impl From<Bool> for bool {
fn from(b: Bool) -> Self {
b.to_bool()
}
}
/// Comparisons normalize through `to_bool`, so e.g. `Bool(1) == Bool(2)`.
impl PartialEq for Bool {
    fn eq(&self, other: &Self) -> bool {
        self.to_bool() == other.to_bool()
    }
}
impl PartialEq<bool> for Bool {
    fn eq(&self, other: &bool) -> bool {
        *other == self.to_bool()
    }
}
impl PartialEq<Bool> for bool {
    fn eq(&self, other: &Bool) -> bool {
        other == self
    }
}
impl SerializeTo<Bool> for &Bool {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        (&self.to_bool()).serialize(f)
    }
}
impl SerializeTo<Bool> for &bool {
    fn serialize(self, f: &mut impl Write) -> std::io::Result<usize> {
        // Canonical encoding is a single byte: 0x01 or 0x00.
        let byte = [u8::from(*self)];
        f.write_all(&byte)?;
        Ok(1)
    }
}
/// Implemented by macro-generated GVariant structure (tuple) types.
pub trait Structure<'a>: Cast + Debug + casting::AlignOf + casting::AllBitPatternsValid {
    /// The tuple of references corresponding to this structure's fields.
    type RefTuple;
    /// Borrow the structure's fields as a Rust tuple.
    fn to_tuple(&'a self) -> Self::RefTuple;
}
/// Read the `n`th-from-last frame offset of a serialized structure.
///
/// `n == 0` conventionally maps to offset 0 (the start of the data).
/// Returns `None` when `data` is too short to contain `n` offsets.
#[inline]
fn nth_last_frame_offset(data: &[u8], osz: crate::OffsetSize, n: usize) -> Option<usize> {
    if n == 0 {
        return Some(0);
    }
    data.len()
        .checked_sub(n * osz as usize)
        .map(|pos| read_uint(&data[pos..], osz, 0))
}
/// Compute the `(start, end)` byte range of child `i` of a serialized
/// structure, or `None` when the frame offsets are out-of-spec (truncated
/// data, or inverted/out-of-bounds ranges).
///
/// `a` and `c` are alignment/offset parameters supplied by the
/// macro-generated callers (not visible in this file — presumably the
/// statically-known alignment and fixed-offset contributions of preceding
/// fields).  `size` is `Some` for fixed-size children; `last_child` marks
/// the final child, which extends to the start of the frame-offset area;
/// `n_frame_offsets` is the total number of frame offsets in the structure.
#[inline]
fn calc_offsets<ChildAlign: aligned_bytes::Alignment, B: aligned_bytes::Alignment>(
    data: &[u8],
    i: isize,
    a: usize,
    c: usize,
    size: Option<usize>,
    last_child: bool,
    n_frame_offsets: usize,
) -> Option<(aligned_bytes::AlignedOffset<ChildAlign>, usize)>
where
    aligned_bytes::AlignedOffset<B>: Into<aligned_bytes::AlignedOffset<ChildAlign>>,
{
    let osz = offset_size(data.len());
    // Start is derived from the (i+1)th-from-last frame offset...
    let fo = nth_last_frame_offset(data, osz, (i + 1) as usize)?;
    // ...and the usable data ends where the frame-offset area begins.
    let data_end = usize::checked_sub(data.len(), osz as usize * n_frame_offsets)?;
    let start: aligned_bytes::AlignedOffset<ChildAlign> = align_offset::<B>(fo + a).into()
        + aligned_bytes::AlignedOffset::<ChildAlign>::try_new(c).unwrap();
    let end = if let Some(size) = size {
        // Fixed-size child: its end is implied by its size.
        start.to_usize() + size
    } else if last_child {
        data_end
    } else {
        // Otherwise the next frame offset gives the end.
        nth_last_frame_offset(data, osz, (i + 2) as usize)?
    };
    // Reject inverted or out-of-bounds ranges:
    if start <= end && end <= data_end {
        Some((start, end))
    } else {
        None
    }
}
/// Fetch child `i` of a serialized structure as `&T`, falling back to the
/// type's default value when the frame offsets are out-of-spec, per
/// GVariant's error-handling strategy.  Called by macro-generated code.
#[doc(hidden)]
#[inline]
pub fn get_child_elem<T: Cast + ?Sized, B: aligned_bytes::Alignment>(
    data: &AlignedSlice<<T as AlignOf>::AlignOf>,
    i: isize,
    a: usize,
    c: usize,
    child_size: Option<usize>,
    last_child: bool,
    n_frame_offsets: usize,
) -> &T
where
    aligned_bytes::AlignedOffset<B>: Into<aligned_bytes::AlignedOffset<T::AlignOf>>,
{
    if let Some((start, end)) = calc_offsets::<<T as AlignOf>::AlignOf, B>(
        data,
        i,
        a,
        c,
        child_size,
        last_child,
        n_frame_offsets,
    ) {
        T::from_aligned_slice(&data[..end][start..])
    } else {
        T::default_ref()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use aligned_bytes::{copy_to_align, AlignedSlice, AsAligned, A8};
#[test]
fn test_numbers() {
    let data = copy_to_align(&[1, 2, 3, 4, 5, 6, 7, 8, 9]);
    let aligned_slice: &AlignedSlice<A8> = data.as_ref();
    // Wrong-sized slices cast to the type's default value (0):
    assert_eq!(
        *i32::from_aligned_slice(&aligned_slice[..0].as_aligned()),
        0
    );
    assert_eq!(
        *i32::from_aligned_slice(&aligned_slice[..3].as_aligned()),
        0
    );
    assert_eq!(
        *i32::from_aligned_slice(&aligned_slice[..5].as_aligned()),
        0
    );
    assert_eq!(
        *i32::from_aligned_slice(&aligned_slice[..8].as_aligned()),
        0
    );
    // Correctly-sized slices reinterpret the bytes in place.  NB: the
    // multi-byte expected values below assume a little-endian host.
    assert_eq!(
        Bool::from_aligned_slice(&aligned_slice[..1].as_aligned()).to_bool(),
        true
    );
    assert_eq!(
        *u8::from_aligned_slice(&aligned_slice[..1].as_aligned()),
        0x01
    );
    assert_eq!(
        *i16::from_aligned_slice(&aligned_slice[..2].as_aligned()),
        0x0201
    );
    assert_eq!(
        *u16::from_aligned_slice(&aligned_slice[..2].as_aligned()),
        0x0201
    );
    assert_eq!(
        *i32::from_aligned_slice(&aligned_slice[..4].as_aligned()),
        0x04030201
    );
    assert_eq!(
        *u32::from_aligned_slice(&aligned_slice[..4].as_aligned()),
        0x04030201
    );
    assert_eq!(
        *i64::from_aligned_slice(&aligned_slice[..8]),
        0x0807060504030201
    );
    assert_eq!(
        *u64::from_aligned_slice(&aligned_slice[..8]),
        0x0807060504030201
    );
    assert_eq!(
        *f64::from_aligned_slice(&aligned_slice[..8]),
        f64::from_bits(0x0807060504030201)
    );
}
#[test]
fn test_non_fixed_width_maybe() {
    // Empty data decodes as None:
    assert_eq!(
        MaybeNonFixedSize::<Str>::from_aligned_slice(b"".as_aligned()).to_option(),
        None
    );
    // A single NUL is Some(""): the byte is the string's own terminator
    // plus... actually it is the maybe's trailing marker over empty data.
    assert_eq!(
        MaybeNonFixedSize::<Str>::from_aligned_slice(b"\0".as_aligned())
            .to_option()
            .unwrap(),
        ""
    );
    // Contents + string NUL terminator + maybe marker byte:
    assert_eq!(
        MaybeNonFixedSize::<Str>::from_aligned_slice(b"hello world\0\0".as_aligned())
            .to_option()
            .unwrap(),
        "hello world"
    );
}
#[test]
fn test_fixed_width_maybe() {
    // Empty data decodes as None:
    assert_eq!(
        MaybeFixedSize::<u8>::from_aligned_slice(b"".as_aligned()),
        None
    );
    // Exactly the contained type's size decodes as Some:
    assert_eq!(
        MaybeFixedSize::<u8>::from_aligned_slice(b"\x43".as_aligned()),
        Some(&0x43)
    );
    // Any other size (here 2 bytes for a u8) is out-of-spec => None:
    assert_eq!(
        MaybeFixedSize::<u8>::from_aligned_slice(b"\x43\0".as_aligned()),
        None
    );
}
#[test]
fn test_non_fixed_width_array() {
let a_s = NonFixedWidthArray::<Str>::from_aligned_slice(b"".as_aligned());
assert_eq!(a_s.len(), 0);
assert!(a_s.is_empty());
assert_eq!(a_s.first(), None);
assert_eq!(a_s.last(), None);
assert!(a_s.into_iter().collect::<Vec<_>>().is_empty());
assert_eq!(a_s.iter().size_hint(), (0, Some(0)));
let a_s =
NonFixedWidthArray::<Str>::from_aligned_slice(b"hello\0world\0\x06\x0c".as_aligned());
assert_eq!(a_s.len(), 2);
assert_eq!(
a_s.into_iter().map(|x| x.to_str()).collect::<Vec<_>>(),
&["hello", "world"]
);
assert_eq!(&a_s[0], "hello");
assert_eq!(&a_s[1], "world");
assert!(!a_s.is_empty());
assert_eq!(a_s.first().unwrap(), "hello");
assert_eq!(a_s.last().unwrap(), "world");
let mut it = a_s.iter();
assert_eq!(it.size_hint(), (2, Some(2)));
it.next();
assert_eq!(it.size_hint(), (1, Some(1)));
it.next();
assert_eq!(it.size_hint(), (0, Some(0)));
let nfwa = NonFixedWidthArray::<[u8]>::from_aligned_slice(b"\x08".as_aligned());
let v = assert_array_self_consistent(nfwa);
assert_eq!(v.as_slice(), &[] as &[&[u8]]);
let nfwa = NonFixedWidthArray::<[u8]>::from_aligned_slice(b"\x01\x00".as_aligned());
let v = assert_array_self_consistent(nfwa);
assert_eq!(v, [&[] as &[u8], &[]]);
let mut data = [0u8; 258];
data[256..].copy_from_slice(&255u16.to_le_bytes());
let cow = copy_to_align(&data);
let nfwa = NonFixedWidthArray::<[u8]>::from_aligned_slice(cow.as_ref());
let v = assert_array_self_consistent(nfwa);
assert_eq!(v.as_slice(), &[] as &[&[u8]]);
}
fn assert_array_self_consistent<T: Cast + ?Sized>(a: &NonFixedWidthArray<T>) -> Vec<&T> {
let v: Vec<_> = a.iter().collect();
assert_eq!(a.len(), v.len());
for (n, elem) in v.iter().enumerate() {
assert_eq!(**elem, a[n]);
}
assert!(a.iter().len() == a.len());
v
}
#[test]
#[should_panic]
fn test_non_fixed_width_array_panic() {
let nfwa = NonFixedWidthArray::<[u8]>::from_aligned_slice(b"\x08".as_aligned());
_ = &nfwa[0];
}
#[test]
fn test_spec_examples() {
assert_eq!(
gv!("s").from_bytes(b"hello world\0").as_str(),
"hello world"
);
assert_eq!(gv!("s").serialize_to_vec("hello world"), b"hello world\0");
assert_eq!(
gv!("ms")
.from_bytes(b"hello world\0\0")
.to_option()
.unwrap(),
"hello world"
);
assert_eq!(
gv!("ms").serialize_to_vec(&Some("hello world")),
b"hello world\0\0"
);
assert_eq!(
gv!("ab").cast(b"\x01\x00\x00\x01\x01".as_aligned()),
[true, false, false, true, true]
);
assert_eq!(
gv!("ab").serialize_to_vec(&[true, false, false, true, true][..]),
b"\x01\x00\x00\x01\x01"
);
let a = gv!("as").from_bytes(b"i\0can\0has\0strings?\0\x02\x06\x0a\x13");
assert_array_self_consistent(&*a);
assert_eq!(*a, ["i", "can", "has", "strings?"][..]);
assert_eq!(
gv!("as")
.serialize_to_vec(&["i", "can", "has", "strings?"][..])
.as_slice(),
b"i\0can\0has\0strings?\0\x02\x06\x0a\x13"
);
let aob = gv!("ay").cast([0x04u8, 0x05, 0x06, 0x07].as_aligned());
assert_eq!(aob, &[0x04u8, 0x05, 0x06, 0x07]);
assert_eq!(
gv!("ay")
.serialize_to_vec(&[0x04u8, 0x05, 0x06, 0x07])
.as_slice(),
&[0x04u8, 0x05, 0x06, 0x07]
);
assert_eq!(gv!("ai").from_bytes(b"\x04\0\0\0\x02\x01\0\0"), [4, 258]);
assert_eq!(
gv!("ai").serialize_to_vec(&[4, 258]).as_slice(),
b"\x04\0\0\0\x02\x01\0\0"
);
}
#[test]
fn test_gvariantstr() {
assert_eq!(Str::from_aligned_slice(b"".as_aligned()).to_str(), "");
assert_eq!(Str::from_aligned_slice(b"\0".as_aligned()).to_str(), "");
assert_eq!(
Str::from_aligned_slice(b"hello world\0".as_aligned()).to_str(),
"hello world"
);
assert_eq!(
Str::from_aligned_slice(b"hello world\0".as_aligned()),
"hello world"
);
}
#[test]
fn test_variant() {
let data = copy_to_align(b"\x04\x00\x00n");
let v = Variant::from_aligned_slice(data.as_ref());
match v.split() {
(b"n", d) => assert_eq!(*i16::from_aligned_slice(d.as_aligned()), 4),
(ty, _) => panic!("Incorrect type {:?}", ty),
}
assert_eq!(v, v);
let data_1 = copy_to_align(b"\x00()");
let data_2 = copy_to_align(b"");
assert_eq!(
Variant::from_aligned_slice(data_1.as_ref()),
Variant::from_aligned_slice(data_2.as_ref())
);
let non_normal = Variant::from_aligned_slice(data_1.as_ref());
assert_ne!(non_normal, v);
let x = VariantWrap(gv!("as"), ["hello", "goodbye"].as_ref());
let v = gv!("v").serialize_to_vec(x);
assert_eq!(v, b"hello\0goodbye\0\x06\x0e\0as");
let xv = VariantWrap(gv!("v"), x);
let vv = gv!("v").serialize_to_vec(xv);
assert_eq!(vv, b"hello\0goodbye\0\x06\x0e\0as\0v");
let de_vv: Owned<Variant> = gv!("v").from_bytes(vv);
let de_v = de_vv.get(gv!("v")).unwrap();
let de = de_v.get(gv!("as")).unwrap();
assert_eq!(de, ["hello", "goodbye"].as_ref())
}
}