use crate::any_pointer;
use crate::private::arena::{BuilderArena, BuilderArenaImpl};
use crate::private::arena::{ReaderArena, ReaderArenaImpl};
use crate::private::layout;
use crate::private::units::BYTES_PER_WORD;
use crate::traits::{FromPointerBuilder, SetterInput};
use crate::traits::{FromPointerReader, Owned};
use crate::OutputSegments;
use crate::Result;
/// Options controlling how a message is read.
#[derive(Clone, Copy, Debug)]
pub struct ReaderOptions {
    /// Limit, in 8-byte words, on the total amount of message data that may
    /// be traversed while reading. `None` disables the limit.
    /// NOTE(review): enforcement happens in the reader arena — confirm there.
    pub traversal_limit_in_words: Option<usize>,
    /// Limit on how deeply nested a pointer chain may be before reads fail.
    pub nesting_limit: i32,
}
/// Default reader options: an 8 Mi-word (64 MiB) traversal limit and a
/// nesting limit of 64.
pub const DEFAULT_READER_OPTIONS: ReaderOptions = ReaderOptions {
    traversal_limit_in_words: Some(8 * 1024 * 1024),
    nesting_limit: 64,
};
impl Default for ReaderOptions {
    /// Returns [`DEFAULT_READER_OPTIONS`].
    fn default() -> Self {
        DEFAULT_READER_OPTIONS
    }
}
impl ReaderOptions {
    /// Creates an options value equal to [`DEFAULT_READER_OPTIONS`].
    pub fn new() -> Self {
        DEFAULT_READER_OPTIONS
    }

    /// Sets the traversal limit (in words); returns `&mut self` for chaining.
    pub fn traversal_limit_in_words(&mut self, value: Option<usize>) -> &mut Self {
        self.traversal_limit_in_words = value;
        self
    }

    /// Sets the nesting limit; returns `&mut self` for chaining.
    pub fn nesting_limit(&mut self, value: i32) -> &mut Self {
        self.nesting_limit = value;
        self
    }
}
/// A source of the segments that make up a message.
pub trait ReaderSegments {
    /// Returns the segment with the given index, or `None` if the index is
    /// out of range.
    fn get_segment(&self, idx: u32) -> Option<&[u8]>;

    /// Returns the number of segments, discovered by probing indices in
    /// order until the first missing one.
    fn len(&self) -> usize {
        let mut count: usize = 0;
        while self.get_segment(count as u32).is_some() {
            count += 1;
        }
        count
    }

    /// Returns true if there are no segments.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// Blanket impl: a shared reference to a segment source is itself a segment
/// source; every call forwards to the referenced value.
impl<S> ReaderSegments for &S
where
    S: ReaderSegments,
{
    fn get_segment(&self, idx: u32) -> Option<&[u8]> {
        S::get_segment(*self, idx)
    }
    fn len(&self) -> usize {
        S::len(*self)
    }
}
/// A message's segments, backed by a borrowed slice of byte slices.
pub struct SegmentArray<'a> {
    segments: &'a [&'a [u8]],
}
impl<'a> SegmentArray<'a> {
pub fn new(segments: &'a [&'a [u8]]) -> SegmentArray<'a> {
SegmentArray { segments }
}
}
impl<'b> ReaderSegments for SegmentArray<'b> {
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        // Checked index into the backing slice; `None` past the end.
        self.segments.get(id as usize).map(|segment| *segment)
    }
    fn len(&self) -> usize {
        self.segments.len()
    }
}
impl<'b> ReaderSegments for [&'b [u8]] {
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.get(id as usize).map(|segment| &**segment)
    }
    fn len(&self) -> usize {
        // The inherent slice `len`, spelled out so it is obvious this does
        // not recurse into the trait's default method.
        <[&'b [u8]]>::len(self)
    }
}
/// A container for reading a message out of a segment source `S`.
pub struct Reader<S>
where
    S: ReaderSegments,
{
    // Arena mediating all segment access (and read limits) for `S`.
    arena: ReaderArenaImpl<S>,
}
impl<S> Reader<S>
where
    S: ReaderSegments,
{
    /// Wraps `segments` in a message reader configured by `options`.
    pub fn new(segments: S, options: ReaderOptions) -> Self {
        Self {
            arena: ReaderArenaImpl::new(segments, options),
        }
    }

    /// Reads the root pointer from the start of segment 0, as an untyped
    /// `any_pointer` reader.
    fn get_root_internal(&self) -> Result<any_pointer::Reader<'_>> {
        let (segment_start, _seg_len) = self.arena.get_segment(0)?;
        let pointer_reader = layout::PointerReader::get_root(
            &self.arena,
            0,
            segment_start,
            self.arena.nesting_limit(),
        )?;
        Ok(any_pointer::Reader::new(pointer_reader))
    }

    /// Gets the root of the message, interpreting it as the given type `T`.
    pub fn get_root<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        self.get_root_internal()?.get_as()
    }

    /// Consumes the reader and returns the underlying segment source.
    pub fn into_segments(self) -> S {
        self.arena.into_segments()
    }

    /// Checks whether the message is in canonical form.
    ///
    /// Returns `Ok(true)` only when the message has exactly one segment,
    /// the root value is laid out canonically, and the segment contains no
    /// words beyond those the canonical layout consumes.
    pub fn is_canonical(&self) -> Result<bool> {
        let (segment_start, seg_len) = self.arena.get_segment(0)?;

        // A canonical message has exactly one segment.
        if self.arena.get_segment(1).is_ok() {
            return Ok(false);
        }

        let pointer_reader = layout::PointerReader::get_root(
            &self.arena,
            0,
            segment_start,
            self.arena.nesting_limit(),
        )?;

        // `is_canonical` advances `read_head` over every word it visits.
        // The head starts just past the root pointer word.
        // SAFETY: NOTE(review): assumes segment 0 holds at least one word
        // (the root pointer), so `+ BYTES_PER_WORD` stays within or one past
        // the allocation — confirm the arena guarantees this.
        let read_head = ::core::cell::Cell::new(unsafe { segment_start.add(BYTES_PER_WORD) });
        let root_is_canonical = pointer_reader.is_canonical(&read_head)?;

        // Also require that the traversal consumed the entire segment.
        let all_words_consumed = (read_head.get() as usize - segment_start as usize)
            / BYTES_PER_WORD
            == seg_len as usize;
        Ok(root_is_canonical && all_words_consumed)
    }

    /// Returns a canonicalized single-segment copy of the message as a flat
    /// vector of words.
    #[cfg(feature = "alloc")]
    pub fn canonicalize(&self) -> Result<alloc::vec::Vec<crate::Word>> {
        let root = self.get_root_internal()?;
        // One extra word for the root pointer itself.
        let size = root.target_size()?.word_count + 1;
        let mut message = Builder::new(HeapAllocator::new().first_segment_words(size as u32));
        message.set_root_canonical(root)?;
        let output_segments = message.get_segments_for_output();
        // `set_root_canonical` guarantees single-segment output.
        assert_eq!(1, output_segments.len());
        let output = output_segments[0];
        assert!((output.len() / BYTES_PER_WORD) as u64 <= size);
        let mut result = crate::Word::allocate_zeroed_vec(output.len() / BYTES_PER_WORD);
        crate::Word::words_to_bytes_mut(&mut result[..]).copy_from_slice(output);
        Ok(result)
    }

    /// Converts into a `TypedReader` whose root type is `T`.
    pub fn into_typed<T: Owned>(self) -> TypedReader<S, T> {
        TypedReader::new(self)
    }
}
/// A message reader whose root is statically known to have type `T`.
pub struct TypedReader<S, T>
where
    S: ReaderSegments,
    T: Owned,
{
    // Zero-sized marker binding the root type without storing a `T`.
    marker: ::core::marker::PhantomData<T>,
    message: Reader<S>,
}
impl<S, T> TypedReader<S, T>
where
S: ReaderSegments,
T: Owned,
{
pub fn new(message: Reader<S>) -> Self {
Self {
marker: ::core::marker::PhantomData,
message,
}
}
pub fn get(&self) -> Result<T::Reader<'_>> {
self.message.get_root()
}
pub fn into_inner(self) -> Reader<S> {
self.message
}
}
impl<S, T> From<Reader<S>> for TypedReader<S, T>
where
S: ReaderSegments,
T: Owned,
{
fn from(message: Reader<S>) -> Self {
Self::new(message)
}
}
impl<A, T> From<Builder<A>> for TypedReader<Builder<A>, T>
where
    A: Allocator,
    T: Owned,
{
    /// Freezes a builder into a typed reader over its own segments.
    fn from(message: Builder<A>) -> Self {
        message.into_reader().into_typed()
    }
}
impl<A, T> From<TypedBuilder<T, A>> for TypedReader<Builder<A>, T>
where
A: Allocator,
T: Owned,
{
fn from(builder: TypedBuilder<T, A>) -> Self {
builder.into_reader()
}
}
/// An object that allocates memory for the segments of a message under
/// construction.
///
/// # Safety
/// NOTE(review): implementors in this file all hand out zeroed, 8-byte
/// aligned memory of at least the requested size; confirm that callers rely
/// on exactly that contract before implementing this trait elsewhere.
pub unsafe trait Allocator {
    /// Allocates memory for a new segment of at least `minimum_size` words,
    /// returning the start pointer and the actual size in words.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32);
    /// Frees a segment.
    ///
    /// # Safety
    /// `ptr` and `word_size` must come from a prior `allocate_segment` call
    /// on this same allocator, and the segment must not be used afterwards.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32);
}
/// A container for building a message out of allocator-provided segments.
pub struct Builder<A>
where
    A: Allocator,
{
    // Arena owning the segments and the allocator `A`.
    arena: BuilderArenaImpl<A>,
}
// SAFETY: NOTE(review): assumed sound because a `Builder` exclusively owns
// its arena and allocator (`A: Send`); confirm `BuilderArenaImpl` holds no
// thread-bound state that would make this unsound.
unsafe impl<A> Send for Builder<A> where A: Send + Allocator {}
/// Compile-time check that `Reader` and `Builder` are `Send` whenever their
/// inner types are. Never called at runtime.
fn _assert_kinds() {
    fn _assert_send<T: Send>() {}
    fn _assert_builder<A: Allocator + Send>() {
        _assert_send::<Builder<A>>();
    }
    fn _assert_reader<S: ReaderSegments + Send>() {
        _assert_send::<Reader<S>>();
    }
}
impl<A> Builder<A>
where
    A: Allocator,
{
    /// Creates a message builder that obtains segment memory from `allocator`.
    pub fn new(allocator: A) -> Self {
        Self {
            arena: BuilderArenaImpl::new(allocator),
        }
    }

    /// Returns a builder for the root pointer, allocating the first segment
    /// (with one word reserved for the root pointer) if the message is still
    /// empty.
    fn get_root_internal(&mut self) -> any_pointer::Builder<'_> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let location: *mut u8 = seg_start;
        // Destructure so that only the arena (not all of `self`) is borrowed
        // by the returned builder.
        let Self { arena } = self;
        any_pointer::Builder::new(layout::PointerBuilder::get_root(arena, 0, location))
    }

    /// Initializes the root as a fresh value of type `T` and returns a
    /// builder for it.
    pub fn init_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> T {
        let root = self.get_root_internal();
        root.init_as()
    }

    /// Initializes the root as a list-like value of the given `length`.
    pub fn initn_root<'a, T: FromPointerBuilder<'a>>(&'a mut self, length: u32) -> T {
        let root = self.get_root_internal();
        root.initn_as(length)
    }

    /// Gets the already-set root, interpreting it as type `T`.
    pub fn get_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> Result<T> {
        let root = self.get_root_internal();
        root.get_as()
    }

    /// Reads the root through a read-only view of the builder's segments.
    pub fn get_root_as_reader<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        if self.arena.is_empty() {
            // No root has been allocated yet; read a default (null) pointer.
            any_pointer::Reader::new(layout::PointerReader::new_default()).get_as()
        } else {
            let (segment_start, _segment_len) = self.arena.get_segment(0)?;
            let pointer_reader = layout::PointerReader::get_root(
                self.arena.as_reader(),
                0,
                segment_start,
                // i32::MAX: effectively no nesting limit for locally built data.
                0x7fffffff,
            )?;
            let root = any_pointer::Reader::new(pointer_reader);
            root.get_as()
        }
    }

    /// Sets the root to a deep copy of `value`.
    pub fn set_root<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        let mut root = self.get_root_internal();
        root.set_as(value)
    }

    /// Sets the root to a canonicalized copy of `value`.
    ///
    /// Panics (via `assert_eq!`) if the resulting message does not fit in a
    /// single segment, which canonical form requires.
    pub fn set_root_canonical<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let pointer = layout::PointerBuilder::get_root(&mut self.arena, 0, seg_start);
        // `true` requests canonical layout from the setter.
        SetterInput::set_pointer_builder(pointer, value, true)?;
        assert_eq!(self.get_segments_for_output().len(), 1);
        Ok(())
    }

    /// Returns the current segments in a form suitable for writing out.
    pub fn get_segments_for_output(&self) -> OutputSegments {
        self.arena.get_segments_for_output()
    }

    /// Freezes this builder into a reader over its own segments, with
    /// traversal and nesting limits disabled (the data was built locally).
    pub fn into_reader(self) -> Reader<Self> {
        Reader::new(
            self,
            ReaderOptions {
                traversal_limit_in_words: None,
                nesting_limit: i32::MAX,
            },
        )
    }

    /// Converts into a `TypedBuilder` whose root type is `T`.
    pub fn into_typed<T: Owned>(self) -> TypedBuilder<T, A> {
        TypedBuilder::new(self)
    }

    /// Consumes the builder and returns its allocator.
    pub fn into_allocator(self) -> A {
        self.arena.into_allocator()
    }
}
/// A builder can serve as a read-only segment source for its own message.
impl<A> ReaderSegments for Builder<A>
where
    A: Allocator,
{
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        let segments = self.get_segments_for_output();
        segments.get(id as usize).copied()
    }
    fn len(&self) -> usize {
        let segments = self.get_segments_for_output();
        segments.len()
    }
}
/// A message builder whose root is statically known to have type `T`.
/// With the "alloc" feature, the allocator defaults to `HeapAllocator`.
#[cfg(feature = "alloc")]
pub struct TypedBuilder<T, A = HeapAllocator>
where
    T: Owned,
    A: Allocator,
{
    // Zero-sized marker binding the root type without storing a `T`.
    marker: ::core::marker::PhantomData<T>,
    message: Builder<A>,
}
/// A message builder whose root is statically known to have type `T`.
/// Without the "alloc" feature there is no default allocator type.
#[cfg(not(feature = "alloc"))]
pub struct TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    // Zero-sized marker binding the root type without storing a `T`.
    marker: ::core::marker::PhantomData<T>,
    message: Builder<A>,
}
#[cfg(feature = "alloc")]
impl<T> TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    /// Creates a heap-backed typed builder with default settings.
    pub fn new_default() -> Self {
        Self::default()
    }
}
#[cfg(feature = "alloc")]
impl<T> Default for TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    /// Wraps a default (heap-backed) untyped builder.
    fn default() -> Self {
        TypedBuilder::new(Builder::default())
    }
}
impl<T, A> TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    /// Wraps an untyped builder, fixing its root type to `T`.
    pub fn new(message: Builder<A>) -> Self {
        Self {
            message,
            marker: ::core::marker::PhantomData,
        }
    }

    /// Initializes the root as a fresh value of type `T`.
    pub fn init_root(&mut self) -> T::Builder<'_> {
        self.message.init_root()
    }

    /// Initializes the root as a list-like value of the given `length`.
    pub fn initn_root(&mut self, length: u32) -> T::Builder<'_> {
        self.message.initn_root(length)
    }

    /// Gets the already-set root as a builder.
    pub fn get_root(&mut self) -> Result<T::Builder<'_>> {
        self.message.get_root()
    }

    /// Gets the root as a read-only view.
    pub fn get_root_as_reader(&self) -> Result<T::Reader<'_>> {
        self.message.get_root_as_reader()
    }

    /// Sets the root to a deep copy of `value`.
    pub fn set_root(&mut self, value: T::Reader<'_>) -> Result<()> {
        self.message.set_root(value)
    }

    /// Unwraps back into the untyped builder.
    pub fn into_inner(self) -> Builder<A> {
        self.message
    }

    /// Borrows the underlying untyped builder.
    pub fn borrow_inner(&self) -> &Builder<A> {
        &self.message
    }

    /// Mutably borrows the underlying untyped builder.
    pub fn borrow_inner_mut(&mut self) -> &mut Builder<A> {
        &mut self.message
    }

    /// Freezes into a typed reader over this builder's segments.
    pub fn into_reader(self) -> TypedReader<Builder<A>, T> {
        TypedReader::new(self.message.into_reader())
    }
}
impl<T, A> From<Builder<A>> for TypedBuilder<T, A>
where
T: Owned,
A: Allocator,
{
fn from(builder: Builder<A>) -> Self {
Self::new(builder)
}
}
/// An allocator that obtains zeroed segments from the global heap.
#[derive(Debug)]
#[cfg(feature = "alloc")]
pub struct HeapAllocator {
    // Size, in words, of the next segment to allocate.
    next_size: u32,
    // How `next_size` evolves after each allocation.
    allocation_strategy: AllocationStrategy,
    // Upper bound, in words, on any single segment.
    max_segment_words: u32,
}
/// Strategy used by `HeapAllocator` to size successive segments.
#[derive(Clone, Copy, Debug)]
pub enum AllocationStrategy {
    /// Every segment gets the same requested size; `next_size` never grows.
    FixedSize,
    /// Each allocation grows `next_size` by the size just handed out
    /// (roughly doubling), capped at `max_segment_words`.
    GrowHeuristically,
}
/// Default size, in words, of a message's first segment.
pub const SUGGESTED_FIRST_SEGMENT_WORDS: u32 = 1024;
/// Default segment-growth strategy.
pub const SUGGESTED_ALLOCATION_STRATEGY: AllocationStrategy = AllocationStrategy::GrowHeuristically;
#[cfg(feature = "alloc")]
impl Default for HeapAllocator {
    /// Suggested first-segment size and growth strategy, with a 2^29-word
    /// (4 GiB) cap on any single segment.
    fn default() -> Self {
        HeapAllocator {
            next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
            allocation_strategy: SUGGESTED_ALLOCATION_STRATEGY,
            max_segment_words: 1 << 29,
        }
    }
}
#[cfg(feature = "alloc")]
impl HeapAllocator {
    /// Creates an allocator with the default settings.
    pub fn new() -> Self {
        Default::default()
    }

    /// Sets the size, in words, of the first segment to allocate.
    ///
    /// # Panics
    /// Panics if `value` exceeds the configured maximum segment size.
    pub fn first_segment_words(mut self, value: u32) -> Self {
        assert!(value <= self.max_segment_words);
        self.next_size = value;
        self
    }

    /// Sets the segment-growth strategy.
    pub fn allocation_strategy(mut self, value: AllocationStrategy) -> Self {
        self.allocation_strategy = value;
        self
    }

    /// Sets the maximum size, in words, of any single segment.
    ///
    /// # Panics
    /// Panics if the pending next-segment size already exceeds `value`.
    pub fn max_segment_words(mut self, value: u32) -> Self {
        assert!(self.next_size <= value);
        self.max_segment_words = value;
        self
    }
}
#[cfg(feature = "alloc")]
unsafe impl Allocator for HeapAllocator {
    /// Allocates a zeroed segment of `max(minimum_size, next_size)` words
    /// from the global heap, then advances `next_size` per the strategy.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        let size = core::cmp::max(minimum_size, self.next_size);
        let layout =
            alloc::alloc::Layout::from_size_align(size as usize * BYTES_PER_WORD, 8).unwrap();
        // SAFETY: NOTE(review): `alloc_zeroed` with a zero-size layout is UB;
        // this assumes `size > 0` — confirm callers never request zero words
        // with `next_size == 0` (possible via `first_segment_words(0)`).
        let ptr = unsafe { alloc::alloc::alloc_zeroed(layout) };
        if ptr.is_null() {
            alloc::alloc::handle_alloc_error(layout);
        }
        match self.allocation_strategy {
            AllocationStrategy::GrowHeuristically => {
                // Grow `next_size` by the size just handed out, saturating at
                // `max_segment_words`. The subtraction cannot underflow: the
                // builder methods keep `next_size <= max_segment_words`.
                if size < self.max_segment_words - self.next_size {
                    self.next_size += size;
                } else {
                    self.next_size = self.max_segment_words;
                }
            }
            AllocationStrategy::FixedSize => {}
        }
        (ptr, size)
    }
    /// Frees a segment previously returned by `allocate_segment` and resets
    /// the growth state back to the suggested first-segment size.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, _words_used: u32) {
        // SAFETY: per the trait contract, `ptr`/`word_size` come from a prior
        // `allocate_segment`, so this layout matches the allocation.
        unsafe {
            alloc::alloc::dealloc(
                ptr,
                alloc::alloc::Layout::from_size_align(word_size as usize * BYTES_PER_WORD, 8)
                    .unwrap(),
            );
        }
        self.next_size = SUGGESTED_FIRST_SEGMENT_WORDS;
    }
}
/// Verifies that heuristic growth saturates at `max_segment_words`.
#[cfg(feature = "alloc")]
#[test]
fn test_allocate_max() {
    let request = 1 << 24;
    let mut allocator = HeapAllocator::new()
        .max_segment_words((1 << 25) - 1)
        .first_segment_words(request);
    let (p1, s1) = allocator.allocate_segment(request);
    let (p2, s2) = allocator.allocate_segment(request);
    let (p3, s3) = allocator.allocate_segment(request);
    // First segment is exactly the request; later ones are capped.
    assert_eq!(s1, request);
    assert_eq!(s2, allocator.max_segment_words);
    assert_eq!(s3, allocator.max_segment_words);
    for (ptr, size) in [(p1, s1), (p2, s2), (p3, s3)] {
        unsafe { allocator.deallocate_segment(ptr, size, 0) };
    }
}
#[cfg(feature = "alloc")]
impl Builder<HeapAllocator> {
    /// Creates a heap-backed builder with default settings.
    pub fn new_default() -> Self {
        Self::default()
    }
}
#[cfg(feature = "alloc")]
impl Default for Builder<HeapAllocator> {
    /// A builder backed by a default `HeapAllocator`.
    fn default() -> Self {
        Builder::new(HeapAllocator::default())
    }
}
/// An allocator that serves the first segment from a caller-provided buffer
/// and falls back to a `HeapAllocator` for everything else.
#[cfg(feature = "alloc")]
pub struct ScratchSpaceHeapAllocator<'a> {
    // The borrowed first-segment buffer.
    scratch_space: &'a mut [u8],
    // True while the scratch buffer is handed out as a segment.
    scratch_space_allocated: bool,
    // Fallback allocator for requests the scratch buffer cannot serve.
    allocator: HeapAllocator,
}
#[cfg(feature = "alloc")]
impl<'a> ScratchSpaceHeapAllocator<'a> {
    /// Zeroes `scratch_space` and wraps it in a new allocator, with a
    /// default `HeapAllocator` as the fallback.
    ///
    /// # Panics
    /// Unless the "unaligned" feature is enabled, panics when
    /// `scratch_space` is not 8-byte aligned.
    pub fn new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a> {
        #[cfg(not(feature = "unaligned"))]
        {
            if scratch_space.as_ptr() as usize % BYTES_PER_WORD != 0 {
                panic!(
                    "Scratch space must be 8-byte aligned, or you must enable the \"unaligned\" \
                     feature in the capnp crate"
                );
            }
        }
        // Segments must start zeroed; `fill` replaces the manual byte loop.
        scratch_space.fill(0);
        ScratchSpaceHeapAllocator {
            scratch_space,
            scratch_space_allocated: false,
            allocator: HeapAllocator::new(),
        }
    }

    /// Sets the size, in words, of the first heap-allocated segment — i.e.
    /// the message's *second* segment, after the scratch buffer.
    pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.first_segment_words(value),
            ..self
        }
    }

    /// Sets the growth strategy of the fallback heap allocator.
    pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.allocation_strategy(value),
            ..self
        }
    }
}
#[cfg(feature = "alloc")]
unsafe impl<'a> Allocator for ScratchSpaceHeapAllocator<'a> {
    /// Hands out the (pre-zeroed) scratch buffer for the first request it
    /// can hold; all other requests go to the fallback heap allocator.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // NOTE(review): `<` rejects a request that exactly fills the scratch
        // buffer; confirm whether `<=` was intended.
        if (minimum_size as usize) < (self.scratch_space.len() / BYTES_PER_WORD)
            && !self.scratch_space_allocated
        {
            self.scratch_space_allocated = true;
            (
                self.scratch_space.as_mut_ptr(),
                (self.scratch_space.len() / BYTES_PER_WORD) as u32,
            )
        } else {
            self.allocator.allocate_segment(minimum_size)
        }
    }
    /// Returns the scratch buffer (re-zeroing its used portion so it can be
    /// handed out again), or forwards to the heap allocator.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        let seg_ptr = self.scratch_space.as_mut_ptr();
        if ptr == seg_ptr {
            // SAFETY: `ptr` equals the scratch buffer's pointer, and
            // `words_used` words were part of that buffer, so the write
            // stays inside the borrowed allocation.
            unsafe {
                core::ptr::write_bytes(
                    seg_ptr, 0u8,
                    (words_used as usize) * BYTES_PER_WORD,
                );
            }
            self.scratch_space_allocated = false;
        } else {
            self.allocator
                .deallocate_segment(ptr, word_size, words_used);
        }
    }
}
/// An allocator that serves exactly one segment out of a caller-provided
/// buffer and panics on any further allocation.
pub struct SingleSegmentAllocator<'a> {
    // The borrowed single-segment buffer.
    segment: &'a mut [u8],
    // True while the buffer is handed out as a segment.
    segment_allocated: bool,
}
impl<'a> SingleSegmentAllocator<'a> {
    /// Zeroes `segment` and wraps it in a new single-segment allocator.
    ///
    /// # Panics
    /// Unless the "unaligned" feature is enabled, panics when `segment` is
    /// not 8-byte aligned.
    pub fn new(segment: &'a mut [u8]) -> SingleSegmentAllocator<'a> {
        #[cfg(not(feature = "unaligned"))]
        {
            if segment.as_ptr() as usize % BYTES_PER_WORD != 0 {
                panic!(
                    "Segment must be 8-byte aligned, or you must enable the \"unaligned\" \
                     feature in the capnp crate"
                );
            }
        }
        // Segments must start zeroed; `fill` replaces the manual byte loop.
        segment.fill(0);
        SingleSegmentAllocator {
            segment,
            segment_allocated: false,
        }
    }
}
unsafe impl<'a> Allocator for SingleSegmentAllocator<'a> {
    /// Hands out the whole (pre-zeroed) buffer as the one and only segment.
    ///
    /// # Panics
    /// Panics if the request does not fit in the buffer, or if a segment
    /// has already been allocated and not yet deallocated.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        let available_word_count = self.segment.len() / BYTES_PER_WORD;
        if (minimum_size as usize) > available_word_count {
            panic!(
                "Allocation too large: asked for {minimum_size} words, \
                 but only {available_word_count} are available."
            )
        } else if self.segment_allocated {
            // (fixed grammar: "allocated" -> "allocate")
            panic!("Tried to allocate two segments in a SingleSegmentAllocator.")
        } else {
            self.segment_allocated = true;
            (
                self.segment.as_mut_ptr(),
                (self.segment.len() / BYTES_PER_WORD) as u32,
            )
        }
    }
    /// Returns the buffer, re-zeroing its used portion so it can be handed
    /// out again. Pointers not belonging to the buffer are ignored.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, _word_size: u32, words_used: u32) {
        let seg_ptr = self.segment.as_mut_ptr();
        if ptr == seg_ptr {
            // SAFETY: `ptr` equals the buffer's pointer, and `words_used`
            // words were part of that buffer, so the write stays inside the
            // borrowed allocation.
            unsafe {
                core::ptr::write_bytes(
                    seg_ptr, 0u8,
                    (words_used as usize) * BYTES_PER_WORD,
                );
            }
            self.segment_allocated = false;
        }
    }
}
// SAFETY: a mutable reference forwards every call to the referenced
// allocator, so it upholds whatever contract `A` does.
unsafe impl<'a, A> Allocator for &'a mut A
where
    A: Allocator,
{
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // UFCS reborrow makes it explicit that this dispatches to `A`'s
        // implementation rather than recursing on `&mut A`.
        A::allocate_segment(&mut **self, minimum_size)
    }
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        // SAFETY: the caller upholds the underlying allocator's contract.
        unsafe { A::deallocate_segment(&mut **self, ptr, word_size, words_used) }
    }
}