use core::{fmt, mem, ops::Deref};
use crate::{
Emit, Flat,
buf::Buf,
emitter::{Emitter, Pos},
session::{Brand, Session},
};
/// An owned, self-contained flat region whose root value is a `T`.
///
/// The entire encoding lives in one contiguous byte buffer; the root `T`
/// sits at offset 0 (see the `Deref` impl). With the `alloc` feature the
/// buffer type defaults to an `AlignedBuf<T>`.
#[cfg(feature = "alloc")]
#[must_use]
pub struct Region<T: Flat, B: Buf = crate::buf::AlignedBuf<T>> {
    // Backing byte buffer holding the full encoding; root `T` at offset 0.
    buf: B,
    // Marks logical ownership of a `T` without storing one directly.
    _type: core::marker::PhantomData<T>,
}
/// An owned, self-contained flat region whose root value is a `T`.
///
/// Without the `alloc` feature there is no default buffer type, so the
/// caller must always name `B` explicitly.
#[cfg(not(feature = "alloc"))]
#[must_use]
pub struct Region<T: Flat, B: Buf> {
    // Backing byte buffer holding the full encoding; root `T` at offset 0.
    buf: B,
    // Marks logical ownership of a `T` without storing one directly.
    _type: core::marker::PhantomData<T>,
}
#[cfg(feature = "alloc")]
impl<T: Flat> Region<T> {
    /// Builds a region into the default buffer type by running `builder`.
    ///
    /// Convenience wrapper over [`Region::new_in`].
    pub fn new(builder: impl Emit<T>) -> Self {
        Self::new_in(builder)
    }

    /// Like [`Region::new`], but pre-sizes the buffer to `capacity` bytes.
    ///
    /// Convenience wrapper over [`Region::with_capacity_in`].
    pub fn with_capacity(capacity: u32, builder: impl Emit<T>) -> Self {
        Self::with_capacity_in(capacity, builder)
    }
}
impl<T: Flat, B: Buf> Region<T, B> {
    /// Builds a region in a caller-chosen buffer type `B` by running
    /// `builder` against a fresh [`Emitter`].
    pub fn new_in(builder: impl Emit<T>) -> Self {
        let mut em = Emitter::<T, B>::new();
        builder.emit(&mut em);
        em.finish()
    }

    /// Like [`Region::new_in`], but pre-sizes the emitter's buffer to
    /// `capacity` bytes to avoid regrowth during emission.
    pub fn with_capacity_in(capacity: u32, builder: impl Emit<T>) -> Self {
        let mut em = Emitter::<T, B>::with_capacity(capacity);
        builder.emit(&mut em);
        em.finish()
    }

    /// Adopts an already-filled buffer without validating its contents.
    ///
    /// # Safety
    /// `buf` must contain a valid encoding of `T` rooted at offset 0.
    /// Only a length lower bound is checked, and only in debug builds.
    pub(crate) unsafe fn from_buf(buf: B) -> Self {
        // Cheap sanity check: the buffer must at least cover the root value.
        debug_assert!(buf.len() as usize >= mem::size_of::<T>(), "buffer too small for root type");
        Self { buf, _type: core::marker::PhantomData }
    }

    /// Runs `f` with a mutable [`Session`] over this region.
    ///
    /// The `for<'id>` bound gives each session a fresh invariant brand
    /// lifetime, so handles minted by one session cannot be used with
    /// another (GhostCell-style branding — presumably; confirm in the
    /// `session` module).
    pub fn session<R>(&mut self, f: impl for<'id> FnOnce(&mut Session<'id, '_, T, B>) -> R) -> R {
        // Re-expose the buffer pointer's provenance before the session
        // derives raw pointers from it (strict-provenance hygiene).
        self.buf.expose_provenance();
        let brand = Brand::new();
        let mut session = Session::new(self, brand);
        f(&mut session)
    }

    /// Appends the raw bytes of `src` to this region, returning the offset
    /// at which the grafted bytes begin.
    pub(crate) fn graft_internal<U: Flat, B2: Buf>(&mut self, src: &Region<U, B2>) -> Pos {
        // Pad so the grafted data keeps the alignment its source buffer had.
        self.buf.align_to(B2::ALIGN);
        let pos = Pos(self.buf.len());
        self.buf.extend_from_slice(src.buf.as_bytes());
        pos
    }

    /// Total size of the encoded region in bytes.
    #[must_use]
    pub fn byte_len(&self) -> usize {
        self.buf.len() as usize
    }

    /// Raw pointer to the first byte, with provenance freshly exposed.
    pub(crate) fn deref_raw(&self) -> *const u8 {
        self.buf.expose_provenance();
        self.buf.as_ptr()
    }

    /// Re-emits the root into a fresh buffer and swaps it in, compacting
    /// the region (presumably dropping bytes no longer reachable from the
    /// root — confirm against `Emitter` semantics).
    pub fn trim(&mut self) {
        let new_buf = {
            // Borrow the current root via `Deref`, then re-encode it.
            let root: &T = self;
            // The old length is an upper bound for the re-encoded size.
            let mut em = Emitter::<T, B>::with_capacity(self.buf.len());
            Emit::<T>::emit(root, &mut em);
            em.into_buf()
        };
        self.buf = new_buf;
    }

    /// Reserves room for at least `additional` more bytes.
    pub(crate) fn reserve_internal(&mut self, additional: u32) {
        self.buf.reserve(additional);
    }

    /// Allocates space for one `U` in the buffer, returning its offset.
    pub(crate) fn alloc_internal<U: Flat>(&mut self) -> Pos {
        self.buf.alloc::<U>()
    }

    /// Writes `val` into the buffer at offset `at`.
    ///
    /// # Safety
    /// `at` must be a valid slot for a `U` in this buffer; see
    /// `buf::write_flat` for the exact contract.
    pub(crate) unsafe fn write_flat_internal<U: Flat>(&mut self, at: Pos, val: U) {
        // SAFETY: forwarded; the caller upholds `buf::write_flat`'s contract.
        unsafe { crate::buf::write_flat(&mut self.buf, at, val) };
    }

    /// Patches a near-pointer at `at` to reference `target`.
    ///
    /// # Safety
    /// Both offsets must be valid for this buffer; see `buf::patch_near`.
    pub(crate) unsafe fn patch_near_internal(&mut self, at: Pos, target: Pos) {
        // SAFETY: forwarded; the caller upholds `buf::patch_near`'s contract.
        unsafe { crate::buf::patch_near(&mut self.buf, at, target) };
    }

    /// Patches a list header at `at` to reference `target` with `len` items.
    ///
    /// # Safety
    /// Offsets must be valid for this buffer; see `buf::patch_list_header`.
    pub(crate) unsafe fn patch_list_header_internal(&mut self, at: Pos, target: Pos, len: u32) {
        // SAFETY: forwarded; caller upholds `buf::patch_list_header`'s contract.
        unsafe { crate::buf::patch_list_header(&mut self.buf, at, target, len) };
    }

    /// Allocates a segment for `count` elements of `U`, returning its offset.
    pub(crate) fn alloc_segment_internal<U: Flat>(&mut self, count: u32) -> Pos {
        crate::buf::alloc_segment::<U>(&mut self.buf, count)
    }

    /// Links the segment at `seg_pos` to the next segment at `next_seg_pos`.
    ///
    /// # Safety
    /// Both positions must refer to segment headers in this buffer; see
    /// `buf::patch_segment_next`.
    pub(crate) unsafe fn patch_segment_next_internal(&mut self, seg_pos: Pos, next_seg_pos: Pos) {
        // SAFETY: forwarded; caller upholds `buf::patch_segment_next`'s contract.
        unsafe { crate::buf::patch_segment_next(&mut self.buf, seg_pos, next_seg_pos) };
    }

    /// Copies `len` raw bytes from `src` into the buffer at offset `at`.
    ///
    /// # Safety
    /// `src..src+len` must be readable and `at..at+len` must lie inside the
    /// buffer; see `buf::write_bytes`.
    pub(crate) unsafe fn write_bytes_internal(&mut self, at: Pos, src: *const u8, len: usize) {
        // SAFETY: forwarded; the caller upholds `buf::write_bytes`'s contract.
        unsafe { crate::buf::write_bytes(&mut self.buf, at, src, len) };
    }

    /// The full encoded region as a byte slice.
    #[must_use]
    pub fn as_bytes(&self) -> &[u8] {
        self.buf.as_bytes()
    }

    /// Consumes the region, returning the backing buffer.
    pub fn into_buf(self) -> B {
        self.buf
    }

    /// Consumes the region, copying its bytes into an owned `Vec<u8>`.
    #[cfg(feature = "alloc")]
    pub fn into_vec(self) -> alloc::vec::Vec<u8> {
        self.buf.as_bytes().to_vec()
    }

    /// Copies `bytes` into a fresh buffer and validates it as an encoded `T`.
    ///
    /// # Errors
    /// Returns a [`crate::ValidateError`] when the bytes are not a valid
    /// encoding of `T` rooted at offset 0.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, crate::ValidateError> {
        let mut buf = B::empty();
        buf.extend_from_slice(bytes);
        T::validate(0, buf.as_bytes())?;
        // SAFETY: `validate` just confirmed the buffer holds a valid `T`.
        Ok(unsafe { Self::from_buf(buf) })
    }

    /// Copies `bytes` into a fresh buffer without validation.
    ///
    /// # Safety
    /// `bytes` must be a valid encoding of `T` rooted at offset 0.
    pub unsafe fn from_bytes_unchecked(bytes: &[u8]) -> Self {
        let mut buf = B::empty();
        buf.extend_from_slice(bytes);
        // SAFETY: the caller promises `bytes` is a valid encoding of `T`.
        unsafe { Self::from_buf(buf) }
    }

    /// Adopts `buf` without validation.
    ///
    /// # Safety
    /// `buf` must hold a valid encoding of `T` rooted at offset 0.
    pub unsafe fn from_buf_unchecked(buf: B) -> Self {
        // SAFETY: the caller promises the buffer holds a valid `T`.
        unsafe { Self::from_buf(buf) }
    }
}
impl<T: Flat, B: Buf> Deref for Region<T, B> {
    type Target = T;

    /// Borrows the root value stored at the start of the buffer.
    fn deref(&self) -> &T {
        // Re-expose provenance before turning the raw pointer into a
        // reference (strict-provenance hygiene — presumably; see `Buf`).
        self.buf.expose_provenance();
        // SAFETY: every constructor guarantees the buffer begins with a
        // valid, suitably-aligned `T` — checked in `from_bytes`, produced
        // by `Emitter`, or promised by the `unsafe` constructors.
        unsafe { &*self.buf.as_ptr().cast::<T>() }
    }
}
impl<T: Flat, B: Buf + Clone> Clone for Region<T, B> {
    /// Clones the region by duplicating the backing buffer byte-for-byte.
    fn clone(&self) -> Self {
        let buf = self.buf.clone();
        let _type = core::marker::PhantomData;
        Self { buf, _type }
    }
}
impl<T: Flat + PartialEq, B: Buf> PartialEq for Region<T, B> {
    /// Two regions are equal when their root values compare equal; raw
    /// buffer bytes are never compared directly.
    fn eq(&self, other: &Self) -> bool {
        let lhs: &T = self;
        let rhs: &T = other;
        lhs == rhs
    }
}

impl<T: Flat + Eq, B: Buf> Eq for Region<T, B> {}
impl<T: Flat + fmt::Debug, B: Buf> fmt::Debug for Region<T, B> {
    /// Debug-formats the region as `Region { root: … }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let root: &T = self;
        f.debug_struct("Region").field("root", root).finish()
    }
}
impl<T: Flat + fmt::Display, B: Buf> fmt::Display for Region<T, B> {
    /// Displays the root value directly; the region wrapper is invisible
    /// in display output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let root: &T = self;
        fmt::Display::fmt(root, f)
    }
}
// SAFETY: a `Region` owns its buffer outright, so sending it only moves
// `B` across threads (hence `B: Send`). `T: Send + Sync` is also required
// because `Deref` materializes `&T` from the owned bytes — conservative
// bound; NOTE(review): confirm whether `T: Sync` is strictly needed here.
unsafe impl<T: Flat + Send + Sync, B: Buf + Send> Send for Region<T, B> {}
// SAFETY: sharing `&Region` only exposes `&[u8]` and `&T` (via `Deref`),
// which is sound when both the buffer and `T` are `Sync`.
unsafe impl<T: Flat + Send + Sync, B: Buf + Sync> Sync for Region<T, B> {}
impl<T: Flat, B: Buf> AsRef<[u8]> for Region<T, B> {
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
#[cfg(feature = "alloc")]
impl<T: Flat, B: Buf> From<Region<T, B>> for alloc::vec::Vec<u8> {
    /// Consumes the region and copies its bytes into an owned `Vec<u8>`.
    fn from(region: Region<T, B>) -> Self {
        Region::into_vec(region)
    }
}
#[cfg(feature = "serde")]
impl<T: Flat, B: Buf> serde::Serialize for Region<T, B> {
    /// Serializes the whole region as a single opaque byte string.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let encoded = self.as_bytes();
        serializer.serialize_bytes(encoded)
    }
}
#[cfg(feature = "serde")]
impl<'de, T: Flat, B: Buf> serde::Deserialize<'de> for Region<T, B> {
    /// Deserializes a region from a byte string (or a sequence of bytes),
    /// validating the encoding via [`Region::from_bytes`] before
    /// constructing the value.
    ///
    /// # Errors
    /// Fails with a custom error when the bytes are not a valid encoding
    /// of `T`.
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct RegionVisitor<T, B>(core::marker::PhantomData<(T, B)>);
        impl<'de, T: Flat, B: Buf> serde::de::Visitor<'de> for RegionVisitor<T, B> {
            type Value = Region<T, B>;
            fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                f.write_str("a valid nearest region byte buffer")
            }
            fn visit_bytes<E: serde::de::Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                // Validation happens inside `from_bytes`.
                Region::from_bytes(v).map_err(E::custom)
            }
            fn visit_seq<A: serde::de::SeqAccess<'de>>(
                self,
                mut seq: A,
            ) -> Result<Self::Value, A::Error> {
                // `size_hint` comes from untrusted input: cap the upfront
                // allocation so a hostile hint cannot force a huge reserve.
                // The Vec still grows as needed while elements arrive.
                let hint = seq.size_hint().unwrap_or(0);
                let mut bytes = alloc::vec::Vec::with_capacity(hint.min(4096));
                while let Some(b) = seq.next_element::<u8>()? {
                    bytes.push(b);
                }
                self.visit_byte_buf(bytes)
            }
        }
        deserializer.deserialize_byte_buf(RegionVisitor(core::marker::PhantomData))
    }
}