capnp/
message.rs

1// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
2// Licensed under the MIT License:
3//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21
22//! Untyped root container for a Cap'n Proto value.
23//!
24//! ## Notes about type specialization
25//! This module provides [TypedReader] and [TypedBuilder] structs which are strongly-typed variants
26//! of [Reader] and [Builder].
27//!
//! Code autogenerated by capnpc will have an individual module for each of the structures, and
//! each of those modules will have an `Owned` struct which implements the [Owned] trait.
30//!
31//! Example from a real auto-generated file:
32//!
33//! ```ignore
34//! pub mod simple_struct {
35//!     #[derive(Copy, Clone)]
36//!     pub struct Owned(());
37//!     impl <'a> ::capnp::traits::Owned<'a> for Owned { type Reader = Reader<'a>; type Builder = Builder<'a>; }
38//!     ....
39//! }
40//! ```
41//!
42//! [TypedReader] and [TypedBuilder] accept generic type parameter `T`. This parameter must be
43//! a corresponding `Owned` type which was auto-generated inside the corresponding module.
44//!
45//! For example, for auto-generated module `crate::test_data::simple_struct` you'd supply
46//! `crate::test_data::simple_struct::Owned` type into [TypedReader]/[TypedBuilder]
47//!
48//! ```ignore
49//! include!(concat!(env!("OUT_DIR"), "/simple_struct_capnp.rs"));
50//!
//! use capnp::message::{self, ReaderOptions, TypedBuilder, TypedReader};
52//!
53//! fn main() {
54//!     let mut builder = TypedBuilder::<simple_struct::Owned>::new_default();
55//!     let mut builder_root = builder.init_root();
56//!     builder_root.set_x(10);
57//!     builder_root.set_y(20);
58//!
59//!     let mut buffer = vec![];
60//!     capnp::serialize_packed::write_message(&mut buffer, builder.borrow_inner()).unwrap();
61//!
62//!     let reader = capnp::serialize_packed::read_message(buffer.as_slice(), ReaderOptions::new()).unwrap();
63//!     let typed_reader = TypedReader::<_, simple_struct::Owned>::new(reader);
64//!
65//!     let reader_root = typed_reader.get().unwrap();
66//!     assert_eq!(reader_root.get_x(), 10);
//!     assert_eq!(reader_root.get_y(), 20);
68//! }
69//!
70//! ```
71
72use crate::any_pointer;
73use crate::private::arena::{BuilderArena, BuilderArenaImpl};
74use crate::private::arena::{ReaderArena, ReaderArenaImpl};
75use crate::private::layout;
76use crate::private::units::BYTES_PER_WORD;
77use crate::traits::{FromPointerBuilder, SetterInput};
78use crate::traits::{FromPointerReader, Owned};
79use crate::OutputSegments;
80use crate::Result;
81
/// Options controlling how data is read.
#[derive(Clone, Copy, Debug)]
pub struct ReaderOptions {
    /// Limits how many total (8-byte) words of data are allowed to be traversed. Traversal is counted
    /// when a new struct or list builder is obtained, e.g. from a get() accessor. This means that
    /// calling the getter for the same sub-struct multiple times will cause it to be double-counted.
    /// Once the traversal limit is reached, an error will be reported.
    ///
    /// This limit exists for security reasons. It is possible for an attacker to construct a message
    /// in which multiple pointers point at the same location. This is technically invalid, but hard
    /// to detect. Using such a message, an attacker could cause a message which is small on the wire
    /// to appear much larger when actually traversed, possibly exhausting server resources leading to
    /// denial-of-service.
    ///
    /// It makes sense to set a traversal limit that is much larger than the underlying message.
    /// Together with sensible coding practices (e.g. trying to avoid calling sub-object getters
    /// multiple times, which is expensive anyway), this should provide adequate protection without
    /// inconvenience.
    ///
    /// A traversal limit of `None` means that no limit is enforced.
    pub traversal_limit_in_words: Option<usize>,

    /// Limits how deeply nested a message structure can be, e.g. structs containing other structs or
    /// lists of structs.
    ///
    /// Like the traversal limit, this limit exists for security reasons. Since it is common to use
    /// recursive code to traverse recursive data structures, an attacker could easily cause a stack
    /// overflow by sending a very-deeply-nested (or even cyclic) message, without the message even
    /// being very large. The default limit of 64 is probably low enough to prevent any chance of
    /// stack overflow, yet high enough that it is never a problem in practice.
    pub nesting_limit: i32,
}
114
/// Default reader options: a traversal limit of 8Mi words (64 MiB) and a nesting limit of 64.
#[cfg(not(target_pointer_width = "16"))]
pub const DEFAULT_READER_OPTIONS: ReaderOptions = ReaderOptions {
    traversal_limit_in_words: Some(8 * 1024 * 1024),
    nesting_limit: 64,
};

/// Default reader options on 16-bit targets, where `usize` cannot hold the usual
/// traversal limit: 8Ki words (64 KiB) and a nesting limit of 64.
#[cfg(target_pointer_width = "16")]
pub const DEFAULT_READER_OPTIONS: ReaderOptions = ReaderOptions {
    traversal_limit_in_words: Some(8 * 1024),
    nesting_limit: 64,
};

impl Default for ReaderOptions {
    /// Returns `DEFAULT_READER_OPTIONS`.
    fn default() -> Self {
        DEFAULT_READER_OPTIONS
    }
}
132
impl ReaderOptions {
    /// Constructs the default options (same as `ReaderOptions::default()`).
    pub fn new() -> Self {
        DEFAULT_READER_OPTIONS
    }

    /// Overrides the nesting limit. Returns `&mut self` so calls can be chained.
    pub fn nesting_limit(&mut self, value: i32) -> &mut Self {
        self.nesting_limit = value;
        self
    }

    /// Overrides the traversal limit; `None` disables the limit entirely.
    /// Returns `&mut self` so calls can be chained.
    pub fn traversal_limit_in_words(&mut self, value: Option<usize>) -> &mut Self {
        self.traversal_limit_in_words = value;
        self
    }
}
148
/// An object that manages the buffers underlying a Cap'n Proto message reader.
pub trait ReaderSegments {
    /// Returns the segment with index `idx`, or `None` if `idx` is out of range.
    ///
    /// The segment must be 8-byte aligned or the "unaligned" feature must
    /// be enabled in the capnp crate. (Otherwise reading the segment will return an error.)
    ///
    /// The returned slice is required to point to memory that remains valid until the
    /// `ReaderSegments` object is dropped. In safe Rust, it should not be possible to
    /// violate this requirement.
    fn get_segment(&self, idx: u32) -> Option<&[u8]>;

    /// Returns the number of segments, by probing `get_segment()` with increasing
    /// indices until it reports an out-of-range index.
    fn len(&self) -> usize {
        // The range is unbounded, so `find()` only returns `None` if every index maps to
        // a segment, which cannot happen for a finite message; the `unwrap()` never fires.
        (0usize..)
            .find(|&i| self.get_segment(i as u32).is_none())
            .unwrap()
    }

    /// Returns true if the message contains no segments at all.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
174
175/// Allows stacking of references to a `ReaderSegments`.
176///
177/// This is especially useful with the implementation for slices, as it allows treating
178/// `&[&[u8]]` as `ReaderSegments`, e.g., to construct a `Reader`:
179/// ```
180/// # use capnp::message::ReaderSegments;
181/// use capnp::message::Reader;
182/// let slice_of_slices: &[&[u8]] = &[&*b"some data", &*b"more data"];
183/// let _ = Reader::new(slice_of_slices, Default::default());
184/// ```
185impl<S> ReaderSegments for &S
186where
187    S: ReaderSegments + ?Sized,
188{
189    fn get_segment(&self, idx: u32) -> Option<&[u8]> {
190        (**self).get_segment(idx)
191    }
192
193    fn len(&self) -> usize {
194        (**self).len()
195    }
196
197    fn is_empty(&self) -> bool {
198        (**self).is_empty()
199    }
200}
201
/// An array of segments.
pub struct SegmentArray<'a> {
    segments: &'a [&'a [u8]],
}

impl<'a> SegmentArray<'a> {
    /// Wraps the given slice of segments.
    pub fn new(segments: &'a [&'a [u8]]) -> SegmentArray<'a> {
        Self { segments }
    }
}
212
213impl ReaderSegments for SegmentArray<'_> {
214    fn get_segment(&self, id: u32) -> Option<&[u8]> {
215        self.segments.get(id as usize).copied()
216    }
217
218    fn len(&self) -> usize {
219        self.segments.len()
220    }
221}
222
223impl<I> ReaderSegments for [I]
224where
225    I: AsRef<[u8]>,
226{
227    fn get_segment(&self, id: u32) -> Option<&[u8]> {
228        self.get(id as usize).map(|i| i.as_ref())
229    }
230
231    fn len(&self) -> usize {
232        self.len()
233    }
234
235    fn is_empty(&self) -> bool {
236        self.is_empty()
237    }
238}
239
#[cfg(feature = "alloc")]
impl<I> ReaderSegments for alloc::vec::Vec<I>
where
    I: AsRef<[u8]>,
{
    // Delegate to the slice view so the vector's inherent methods are used,
    // not this trait's (which would recurse).
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.as_slice().get(id as usize).map(|seg| seg.as_ref())
    }

    fn len(&self) -> usize {
        self.as_slice().len()
    }

    fn is_empty(&self) -> bool {
        self.as_slice().is_empty()
    }
}
257
/// A container used to read a message.
pub struct Reader<S>
where
    S: ReaderSegments,
{
    // Arena wrapping the segments; it enforces the traversal and nesting limits.
    arena: ReaderArenaImpl<S>,
}
265
impl<S> Reader<S>
where
    S: ReaderSegments,
{
    /// Constructs a reader over `segments`, enforcing the limits given in `options`.
    pub fn new(segments: S, options: ReaderOptions) -> Self {
        Self {
            arena: ReaderArenaImpl::new(segments, options),
        }
    }

    /// Returns the root of the message as an untyped pointer reader.
    fn get_root_internal(&self) -> Result<any_pointer::Reader<'_>> {
        // The root pointer lives at the start of segment 0.
        let (segment_start, _seg_len) = self.arena.get_segment(0)?;
        // SAFETY(review): `segment_start` was just obtained from the arena for segment 0,
        // which appears to be what `PointerReader::get_root` expects — TODO confirm
        // against its safety contract.
        let pointer_reader = unsafe {
            layout::PointerReader::get_root(
                &self.arena,
                0,
                segment_start,
                self.arena.nesting_limit(),
            )
        }?;
        Ok(any_pointer::Reader::new(pointer_reader))
    }

    /// Gets the root of the message, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        self.get_root_internal()?.get_as()
    }

    /// Consumes the reader, returning the underlying segments.
    pub fn into_segments(self) -> S {
        self.arena.into_segments()
    }

    /// Checks whether the message is [canonical](https://capnproto.org/encoding.html#canonicalization).
    pub fn is_canonical(&self) -> Result<bool> {
        let (segment_start, seg_len) = self.arena.get_segment(0)?;

        if self.arena.get_segment(1).is_ok() {
            // TODO(cleanup, apibump): should there be a nicer way to ask the arena how many
            // segments there are?

            // There is more than one segment, so the message cannot be canonical.
            return Ok(false);
        }

        // SAFETY(review): same contract as in `get_root_internal()` — `segment_start`
        // comes straight from the arena's segment 0.
        let pointer_reader = unsafe {
            layout::PointerReader::get_root(
                &self.arena,
                0,
                segment_start,
                self.arena.nesting_limit(),
            )
        }?;
        // Start the read cursor just past the root pointer word; `is_canonical`
        // advances it as it walks the message.
        let read_head = ::core::cell::Cell::new(unsafe { segment_start.add(BYTES_PER_WORD) });
        let root_is_canonical = pointer_reader.is_canonical(&read_head)?;
        // A canonical message leaves no unconsumed trailing words in the segment.
        let all_words_consumed = (read_head.get() as usize - segment_start as usize)
            / BYTES_PER_WORD
            == seg_len as usize;
        Ok(root_is_canonical && all_words_consumed)
    }

    /// Gets the [canonical](https://capnproto.org/encoding.html#canonicalization) form
    /// of this message. Works by copying the message twice. For a canonicalization
    /// method that only requires one copy, see `message::Builder::set_root_canonical()`.
    #[cfg(feature = "alloc")]
    pub fn canonicalize(&self) -> Result<alloc::vec::Vec<crate::Word>> {
        let root = self.get_root_internal()?;
        // One extra word for the root pointer itself.
        let size = root.target_size()?.word_count + 1;
        let mut message = Builder::new(HeapAllocator::new().first_segment_words(size as u32));
        message.set_root_canonical(root)?;
        let output_segments = message.get_segments_for_output();
        // `set_root_canonical()` asserts that it produced a single output segment.
        assert_eq!(1, output_segments.len());
        let output = output_segments[0];
        assert!((output.len() / BYTES_PER_WORD) as u64 <= size);
        let mut result = crate::Word::allocate_zeroed_vec(output.len() / BYTES_PER_WORD);
        crate::Word::words_to_bytes_mut(&mut result[..]).copy_from_slice(output);
        Ok(result)
    }

    /// Wraps this reader in a [`TypedReader`] specialized on root type `T`.
    pub fn into_typed<T: Owned>(self) -> TypedReader<S, T> {
        TypedReader::new(self)
    }

    /// Returns the message size in 8-byte words, as reported by the arena.
    pub fn size_in_words(&self) -> usize {
        self.arena.size_in_words()
    }

    /// Retrieves the underlying [`ReaderSegments`] object.
    pub fn get_segments(&self) -> &S {
        self.arena.get_segments()
    }
}
357
/// A message reader whose value is known to be of type `T`.
/// Please see [module documentation](self) for more info about reader type specialization.
pub struct TypedReader<S, T>
where
    S: ReaderSegments,
    T: Owned,
{
    // Zero-sized tag recording the root type `T`.
    marker: ::core::marker::PhantomData<T>,
    // The untyped reader that actually holds the message.
    message: Reader<S>,
}
368
369impl<S, T> TypedReader<S, T>
370where
371    S: ReaderSegments,
372    T: Owned,
373{
374    pub fn new(message: Reader<S>) -> Self {
375        Self {
376            marker: ::core::marker::PhantomData,
377            message,
378        }
379    }
380
381    pub fn get(&self) -> Result<T::Reader<'_>> {
382        self.message.get_root()
383    }
384
385    pub fn into_inner(self) -> Reader<S> {
386        self.message
387    }
388
389    /// Retrieves the underlying [`ReaderSegments`] object.
390    pub fn get_segments(&self) -> &S {
391        self.message.get_segments()
392    }
393}
394
395impl<S, T> From<Reader<S>> for TypedReader<S, T>
396where
397    S: ReaderSegments,
398    T: Owned,
399{
400    fn from(message: Reader<S>) -> Self {
401        Self::new(message)
402    }
403}
404
405impl<A, T> From<Builder<A>> for TypedReader<Builder<A>, T>
406where
407    A: Allocator,
408    T: Owned,
409{
410    fn from(message: Builder<A>) -> Self {
411        let reader = message.into_reader();
412        reader.into_typed()
413    }
414}
415
416impl<A, T> From<TypedBuilder<T, A>> for TypedReader<Builder<A>, T>
417where
418    A: Allocator,
419    T: Owned,
420{
421    fn from(builder: TypedBuilder<T, A>) -> Self {
422        builder.into_reader()
423    }
424}
425
/// An object that allocates memory for a Cap'n Proto message as it is being built.
///
/// Users of capnproto-rust who wish to provide memory in non-standard ways should
/// implement this trait. Objects implementing this trait are intended to be wrapped
/// by `capnp::private::BuilderArena`, which handles calling the methods at the appropriate
/// times, including calling `deallocate_segment()` on drop.
///
/// # Safety
/// Implementations must ensure all of the following:
///   1. The memory returned by `allocate_segment` is initialized to all zeroes.
///   2. The memory returned by `allocate_segment` is valid until `deallocate_segment()`
///      is called on it.
///   3. The allocated memory does not overlap with other allocated memory.
///   4. The allocated memory is 8-byte aligned (or the "unaligned" feature is enabled
///      for the capnp crate).
pub unsafe trait Allocator {
    /// Allocates zeroed memory for a new segment, returning a pointer to the start of the segment
    /// and a u32 indicating the length of the segment in words. The allocated segment must be
    /// at least `minimum_size` words long (`minimum_size * 8` bytes long). Allocator implementations
    /// commonly allocate much more than the minimum, to reduce the total number of segments needed.
    /// A reasonable strategy is to allocate the maximum of `minimum_size` and twice the size of the
    /// previous segment.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32);

    /// Indicates that a segment, previously allocated via allocate_segment(), is no longer in use.
    /// `word_size` is the length of the segment in words, as returned from `allocate_segment()`.
    /// `words_used` is always less than or equal to `word_size`, and indicates how many
    /// words (contiguous from the start of the segment) were possibly written with non-zero values.
    ///
    /// # Safety
    /// Callers must only call this method on a pointer that has previously been returned
    /// from `allocate_segment()`, and only once on each such segment. `word_size` must
    /// equal the word size returned from `allocate_segment()`, and `words_used` must be at
    /// most `word_size`.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32);
}
462
/// A container used to build a message.
pub struct Builder<A>
where
    A: Allocator,
{
    // Arena that owns the segments and tracks allocations made through `A`.
    arena: BuilderArenaImpl<A>,
}
470
471fn _assert_kinds() {
472    fn _assert_send<T: Send>() {}
473    fn _assert_sync<T: Sync>() {}
474    fn _assert_reader<S: ReaderSegments + Send>() {
475        _assert_send::<Reader<S>>();
476    }
477    fn _assert_builder_send<A: Allocator + Send>() {
478        _assert_send::<Builder<A>>();
479    }
480    fn _assert_builder_sync<A: Allocator + Sync>() {
481        _assert_sync::<Builder<A>>();
482    }
483}
484
impl<A> Builder<A>
where
    A: Allocator,
{
    /// Constructs a new builder that obtains segment memory from `allocator`.
    pub fn new(allocator: A) -> Self {
        Self {
            arena: BuilderArenaImpl::new(allocator),
        }
    }

    /// Returns the root as an untyped pointer builder, first allocating segment 0
    /// and the root pointer word within it if nothing has been allocated yet.
    fn get_root_internal(&mut self) -> any_pointer::Builder<'_> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let location: *mut u8 = seg_start;
        let Self { arena } = self;

        any_pointer::Builder::new(layout::PointerBuilder::get_root(arena, 0, location))
    }

    /// Initializes the root as a value of the given type.
    pub fn init_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> T {
        let root = self.get_root_internal();
        root.init_as()
    }

    /// Initializes the root as a value of the given list type, with the given length.
    pub fn initn_root<'a, T: FromPointerBuilder<'a>>(&'a mut self, length: u32) -> T {
        let root = self.get_root_internal();
        root.initn_as(length)
    }

    /// Gets the root, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> Result<T> {
        let root = self.get_root_internal();
        root.get_as()
    }

    /// Reads the root through a shared reference, interpreting it as the given type.
    pub fn get_root_as_reader<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        if self.arena.is_empty() {
            // Nothing allocated yet: present a default (null) pointer instead.
            any_pointer::Reader::new(layout::PointerReader::new_default()).get_as()
        } else {
            let (segment_start, _segment_len) = self.arena.get_segment(0)?;
            // NOTE(review): the nesting limit here is effectively unlimited (0x7fffffff),
            // presumably because this data was built locally rather than received from
            // an untrusted peer — TODO confirm.
            let pointer_reader = unsafe {
                layout::PointerReader::get_root(
                    self.arena.as_reader(),
                    0,
                    segment_start,
                    0x7fffffff,
                )
            }?;
            let root = any_pointer::Reader::new(pointer_reader);
            root.get_as()
        }
    }

    /// Sets the root to a deep copy of the given value.
    pub fn set_root<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        let mut root = self.get_root_internal();
        root.set_as(value)
    }

    /// Sets the root to a canonicalized version of `value`. If this was the first action taken
    /// on this `Builder`, then a subsequent call to `get_segments_for_output()` should return
    /// a single segment, containing the full canonicalized message.
    pub fn set_root_canonical<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let pointer = layout::PointerBuilder::get_root(&mut self.arena, 0, seg_start);
        // The `true` flag appears to request canonical form from the setter,
        // matching this method's contract — TODO confirm parameter name.
        SetterInput::set_pointer_builder(pointer, value, true)?;
        assert_eq!(self.get_segments_for_output().len(), 1);
        Ok(())
    }

    /// Returns the message's segments in the form needed for serialization.
    pub fn get_segments_for_output(&self) -> OutputSegments<'_> {
        self.arena.get_segments_for_output()
    }

    /// Consumes the builder, producing a `Reader` over the same message with no
    /// traversal or nesting limits.
    pub fn into_reader(self) -> Reader<Self> {
        Reader::new(
            self,
            ReaderOptions {
                traversal_limit_in_words: None,
                nesting_limit: i32::MAX,
            },
        )
    }

    /// Wraps this builder in a [`TypedBuilder`] specialized on root type `T`.
    pub fn into_typed<T: Owned>(self) -> TypedBuilder<T, A> {
        TypedBuilder::new(self)
    }

    /// Retrieves the underlying `Allocator`, deallocating all currently-allocated
    /// segments.
    pub fn into_allocator(self) -> A {
        self.arena.into_allocator()
    }

    /// Returns the message size in 8-byte words, as reported by the arena.
    pub fn size_in_words(&self) -> usize {
        self.arena.size_in_words()
    }
}
596
597impl<A> ReaderSegments for Builder<A>
598where
599    A: Allocator,
600{
601    fn get_segment(&self, id: u32) -> Option<&[u8]> {
602        self.get_segments_for_output().get(id as usize).copied()
603    }
604
605    fn len(&self) -> usize {
606        self.get_segments_for_output().len()
607    }
608}
609
/// Strongly typed variant of the [Builder]
///
/// Generic type parameters:
/// - `T` - type of the capnp message which this builder is specialized on. Please see
///   [module documentation](self) for more info about builder type specialization.
/// - `A` - type of allocator
#[cfg(feature = "alloc")]
pub struct TypedBuilder<T, A = HeapAllocator>
where
    T: Owned,
    A: Allocator,
{
    // Zero-sized tag recording the root type `T`.
    marker: ::core::marker::PhantomData<T>,
    // The untyped builder that actually owns the message.
    message: Builder<A>,
}
625
// Defined separately because the A=HeapAllocator default type
// argument is not allowed in no-alloc mode.
// TODO(apibump): remove the A=HeapAllocator thing above?
#[cfg(not(feature = "alloc"))]
pub struct TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    // Zero-sized tag recording the root type `T`.
    marker: ::core::marker::PhantomData<T>,
    // The untyped builder that actually owns the message.
    message: Builder<A>,
}
638
#[cfg(feature = "alloc")]
impl<T> TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    /// Constructs a typed builder backed by a default [`HeapAllocator`].
    pub fn new_default() -> Self {
        Self::default()
    }
}
648
#[cfg(feature = "alloc")]
impl<T> Default for TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    fn default() -> Self {
        // Equivalent to `Self::new(Builder::default())`.
        Builder::default().into_typed()
    }
}
658
659impl<T, A> TypedBuilder<T, A>
660where
661    T: Owned,
662    A: Allocator,
663{
664    pub fn new(message: Builder<A>) -> Self {
665        Self {
666            marker: ::core::marker::PhantomData,
667            message,
668        }
669    }
670
671    pub fn init_root(&mut self) -> T::Builder<'_> {
672        self.message.init_root()
673    }
674
675    pub fn initn_root(&mut self, length: u32) -> T::Builder<'_> {
676        self.message.initn_root(length)
677    }
678
679    pub fn get_root(&mut self) -> Result<T::Builder<'_>> {
680        self.message.get_root()
681    }
682
683    pub fn get_root_as_reader(&self) -> Result<T::Reader<'_>> {
684        self.message.get_root_as_reader()
685    }
686
687    pub fn set_root(&mut self, value: T::Reader<'_>) -> Result<()> {
688        self.message.set_root(value)
689    }
690
691    pub fn into_inner(self) -> Builder<A> {
692        self.message
693    }
694
695    pub fn borrow_inner(&self) -> &Builder<A> {
696        &self.message
697    }
698
699    pub fn borrow_inner_mut(&mut self) -> &mut Builder<A> {
700        &mut self.message
701    }
702
703    pub fn into_reader(self) -> TypedReader<Builder<A>, T> {
704        TypedReader::new(self.message.into_reader())
705    }
706}
707
708impl<T, A> From<Builder<A>> for TypedBuilder<T, A>
709where
710    T: Owned,
711    A: Allocator,
712{
713    fn from(builder: Builder<A>) -> Self {
714        Self::new(builder)
715    }
716}
717
/// Standard segment allocator. Allocates each segment via `alloc::alloc::alloc_zeroed()`.
#[derive(Debug)]
#[cfg(feature = "alloc")]
pub struct HeapAllocator {
    // Minimum number of words in the next allocation (the actual allocation may be
    // larger if a caller requests more).
    next_size: u32,

    // How to update next_size after an allocation.
    allocation_strategy: AllocationStrategy,

    // Maximum number of words to allocate in a single segment.
    max_segment_words: u32,
}
731
/// Policy for sizing each successive segment allocation.
#[derive(Clone, Copy, Debug)]
pub enum AllocationStrategy {
    /// Allocates the same number of words for each segment, to the extent possible.
    /// This strategy is primarily useful for testing cross-segment pointers.
    FixedSize,

    /// Increases segment size by a multiplicative factor for each subsequent segment.
    GrowHeuristically,
}
741
/// Suggested size, in 8-byte words, for a message's first segment.
pub const SUGGESTED_FIRST_SEGMENT_WORDS: u32 = 1024;
/// Suggested strategy for sizing segments after the first.
pub const SUGGESTED_ALLOCATION_STRATEGY: AllocationStrategy = AllocationStrategy::GrowHeuristically;
744
#[cfg(feature = "alloc")]
impl Default for HeapAllocator {
    /// Starts with the suggested first-segment size and growth strategy, capping
    /// individual segments at 2^29 words.
    fn default() -> Self {
        HeapAllocator {
            max_segment_words: 1 << 29,
            allocation_strategy: SUGGESTED_ALLOCATION_STRATEGY,
            next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
        }
    }
}
755
#[cfg(feature = "alloc")]
impl HeapAllocator {
    /// Equivalent to `HeapAllocator::default()`.
    pub fn new() -> Self {
        Default::default()
    }

    /// Sets the size of the initial segment in words, where 1 word = 8 bytes.
    ///
    /// Panics if `value` exceeds the configured maximum segment size.
    pub fn first_segment_words(self, value: u32) -> Self {
        assert!(value <= self.max_segment_words);
        Self {
            next_size: value,
            ..self
        }
    }

    /// Sets the allocation strategy for segments after the first one.
    pub fn allocation_strategy(self, value: AllocationStrategy) -> Self {
        Self {
            allocation_strategy: value,
            ..self
        }
    }

    /// Sets the maximum number of words allowed in a single allocation.
    ///
    /// Panics if the configured first-segment size already exceeds `value`.
    pub fn max_segment_words(self, value: u32) -> Self {
        assert!(self.next_size <= value);
        Self {
            max_segment_words: value,
            ..self
        }
    }
}
782
#[cfg(feature = "alloc")]
unsafe impl Allocator for HeapAllocator {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // Allocate at least `next_size` words, or more if the caller asks for more.
        let size = core::cmp::max(minimum_size, self.next_size);
        let layout =
            alloc::alloc::Layout::from_size_align(size as usize * BYTES_PER_WORD, 8).unwrap();
        // `alloc_zeroed` satisfies the trait's requirement that segments start zeroed.
        let ptr = unsafe { alloc::alloc::alloc_zeroed(layout) };
        if ptr.is_null() {
            alloc::alloc::handle_alloc_error(layout);
        }
        // Under the growth strategy, double-ish the next allocation, saturating at
        // `max_segment_words` (the subtraction form avoids u32 overflow).
        if let AllocationStrategy::GrowHeuristically = self.allocation_strategy {
            self.next_size = if size < self.max_segment_words - self.next_size {
                self.next_size + size
            } else {
                self.max_segment_words
            };
        }
        (ptr, size)
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, _words_used: u32) {
        let layout =
            alloc::alloc::Layout::from_size_align(word_size as usize * BYTES_PER_WORD, 8).unwrap();
        // SAFETY: the caller guarantees `ptr` and `word_size` came from
        // `allocate_segment()`, so this layout matches the original allocation.
        unsafe {
            alloc::alloc::dealloc(ptr, layout);
        }
        // Reset so a reused allocator starts from the suggested first-segment size.
        self.next_size = SUGGESTED_FIRST_SEGMENT_WORDS;
    }
}
817
#[cfg(feature = "alloc")]
#[test]
fn test_allocate_max() {
    let first_words = 1 << 24;
    let mut allocator = HeapAllocator::new()
        .max_segment_words((1 << 25) - 1)
        .first_segment_words(first_words);

    let segments = [
        allocator.allocate_segment(first_words),
        allocator.allocate_segment(first_words),
        allocator.allocate_segment(first_words),
    ];

    // The first allocation honors the configured first-segment size...
    assert_eq!(segments[0].1, first_words);

    // ...and subsequent allocations top out at max_segment_words.
    assert_eq!(segments[1].1, allocator.max_segment_words);
    assert_eq!(segments[2].1, allocator.max_segment_words);

    for &(ptr, words) in segments.iter() {
        unsafe {
            allocator.deallocate_segment(ptr, words, 0);
        }
    }
}
842
#[cfg(feature = "alloc")]
impl Builder<HeapAllocator> {
    /// Constructs a new `message::Builder<HeapAllocator>` whose first segment has length
    /// `SUGGESTED_FIRST_SEGMENT_WORDS`.
    pub fn new_default() -> Self {
        Self::default()
    }
}
851
#[cfg(feature = "alloc")]
impl Default for Builder<HeapAllocator> {
    /// Constructs a new `message::Builder<HeapAllocator>` whose first segment has length
    /// `SUGGESTED_FIRST_SEGMENT_WORDS`.
    fn default() -> Self {
        Builder::new(HeapAllocator::default())
    }
}
860
/// An Allocator whose first segment is backed by a user-provided buffer.
///
/// Recall that an `Allocator` implementation must ensure that allocated segments are
/// initially *zeroed*. `ScratchSpaceHeapAllocator` ensures that is the case by zeroing
/// the entire buffer upon initial construction, and then zeroing any *potentially used*
/// part of the buffer upon `deallocate_segment()`.
///
/// You can reuse a `ScratchSpaceHeapAllocator` by calling `message::Builder::into_allocator()`,
/// or by initially passing it to `message::Builder::new()` as a `&mut ScratchSpaceHeapAllocator`.
/// Such reuse can save significant amounts of zeroing.
#[cfg(feature = "alloc")]
pub struct ScratchSpaceHeapAllocator<'a> {
    // User-provided buffer backing the first segment.
    scratch_space: &'a mut [u8],
    // Whether the scratch buffer is currently handed out as a segment.
    scratch_space_allocated: bool,
    // Fallback heap allocator for any additional segments.
    allocator: HeapAllocator,
}
877
#[cfg(feature = "alloc")]
impl<'a> ScratchSpaceHeapAllocator<'a> {
    /// Writes zeroes into the entire buffer and constructs a new allocator from it.
    ///
    /// If the buffer is large, this operation could be relatively expensive. If you want to reuse
    /// the same scratch space in a later message, you should reuse the entire
    /// `ScratchSpaceHeapAllocator`, to avoid paying this full cost again.
    ///
    /// # Panics
    /// Panics if `scratch_space` is not 8-byte aligned while the "unaligned" feature is disabled.
    pub fn new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a> {
        #[cfg(not(feature = "unaligned"))]
        {
            if scratch_space.as_ptr() as usize % BYTES_PER_WORD != 0 {
                panic!(
                    "Scratch space must be 8-byte aligned, or you must enable the \"unaligned\" \
                        feature in the capnp crate"
                );
            }
        }

        // The `Allocator` contract requires zeroed segments, so scrub the buffer up front.
        scratch_space.iter_mut().for_each(|b| *b = 0);

        ScratchSpaceHeapAllocator {
            scratch_space,
            scratch_space_allocated: false,
            allocator: HeapAllocator::new(),
        }
    }

    /// Sets the size of the second segment in words, where 1 word = 8 bytes.
    /// (The first segment is the scratch space passed to `ScratchSpaceHeapAllocator::new()`.)
    pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.first_segment_words(value),
            ..self
        }
    }

    /// Sets the allocation strategy for segments after the second one.
    pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.allocation_strategy(value),
            ..self
        }
    }
}
924
#[cfg(feature = "alloc")]
unsafe impl Allocator for ScratchSpaceHeapAllocator<'_> {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // Hand out the scratch buffer for the first request that fits; everything
        // else (later requests, or an oversized first request) goes to the
        // backing heap allocator.
        // NOTE(review): the `<` here means a request for *exactly*
        // `scratch_space.len() / BYTES_PER_WORD` words falls through to the heap
        // even though the scratch buffer would fit it — looks like an off-by-one
        // (`<=`); confirm whether this is intentional before changing.
        if (minimum_size as usize) < (self.scratch_space.len() / BYTES_PER_WORD)
            && !self.scratch_space_allocated
        {
            self.scratch_space_allocated = true;
            // The buffer was zeroed at construction and is re-zeroed in
            // `deallocate_segment()`, so the zeroing contract is upheld.
            (
                self.scratch_space.as_mut_ptr(),
                (self.scratch_space.len() / BYTES_PER_WORD) as u32,
            )
        } else {
            self.allocator.allocate_segment(minimum_size)
        }
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        let seg_ptr = self.scratch_space.as_mut_ptr();
        if ptr == seg_ptr {
            // Rezero the slice to allow reuse of the allocator. We only need to write
            // words that we know might contain nonzero values.
            unsafe {
                core::ptr::write_bytes(
                    seg_ptr, // miri isn't happy if we use ptr instead
                    0u8,
                    (words_used as usize) * BYTES_PER_WORD,
                );
            }
            self.scratch_space_allocated = false;
        } else {
            // Not the scratch segment, so it must have come from the heap
            // allocator; forward the deallocation there.
            self.allocator
                .deallocate_segment(ptr, word_size, words_used);
        }
    }
}
960
/// An Allocator whose first and only segment is backed by a user-provided buffer.
/// If the segment fills up, subsequent allocations trigger panics.
///
/// The main purpose of this struct is to be used in situations where heap allocation
/// is not available.
///
/// Recall that an `Allocator` implementation must ensure that allocated segments are
/// initially *zeroed*. `SingleSegmentAllocator` ensures that is the case by zeroing
/// the entire buffer upon initial construction, and then zeroing any *potentially used*
/// part of the buffer upon `deallocate_segment()`.
///
/// You can reuse a `SingleSegmentAllocator` by calling `message::Builder::into_allocator()`,
/// or by initially passing it to `message::Builder::new()` as a `&mut SingleSegmentAllocator`.
/// Such reuse can save significant amounts of zeroing.
pub struct SingleSegmentAllocator<'a> {
    // User-provided buffer backing the one and only segment. Zeroed on
    // construction and re-zeroed (up to the words used) on `deallocate_segment()`.
    segment: &'a mut [u8],
    // True while the buffer is lent out; a second allocation while this is set
    // panics.
    segment_allocated: bool,
}
979
980impl<'a> SingleSegmentAllocator<'a> {
981    /// Writes zeroes into the entire buffer and constructs a new allocator from it.
982    ///
983    /// If the buffer is large, this operation could be relatively expensive. If you want to reuse
984    /// the same scratch space in a later message, you should reuse the entire
985    /// `SingleSegmentAllocator`, to avoid paying this full cost again.
986    pub fn new(segment: &'a mut [u8]) -> SingleSegmentAllocator<'a> {
987        #[cfg(not(feature = "unaligned"))]
988        {
989            if segment.as_ptr() as usize % BYTES_PER_WORD != 0 {
990                panic!(
991                    "Segment must be 8-byte aligned, or you must enable the \"unaligned\" \
992                        feature in the capnp crate"
993                );
994            }
995        }
996
997        // We need to ensure that the buffer is zeroed.
998        for b in &mut segment[..] {
999            *b = 0;
1000        }
1001        SingleSegmentAllocator {
1002            segment,
1003            segment_allocated: false,
1004        }
1005    }
1006}
1007
1008unsafe impl Allocator for SingleSegmentAllocator<'_> {
1009    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
1010        let available_word_count = self.segment.len() / BYTES_PER_WORD;
1011        if (minimum_size as usize) > available_word_count {
1012            panic!(
1013                "Allocation too large: asked for {minimum_size} words, \
1014                    but only {available_word_count} are available."
1015            )
1016        } else if self.segment_allocated {
1017            panic!("Tried to allocated two segments in a SingleSegmentAllocator.")
1018        } else {
1019            self.segment_allocated = true;
1020            (
1021                self.segment.as_mut_ptr(),
1022                (self.segment.len() / BYTES_PER_WORD) as u32,
1023            )
1024        }
1025    }
1026
1027    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, _word_size: u32, words_used: u32) {
1028        let seg_ptr = self.segment.as_mut_ptr();
1029        if ptr == seg_ptr {
1030            // Rezero the slice to allow reuse of the allocator. We only need to write
1031            // words that we know might contain nonzero values.
1032            unsafe {
1033                core::ptr::write_bytes(
1034                    seg_ptr, // miri isn't happy if we use ptr instead
1035                    0u8,
1036                    (words_used as usize) * BYTES_PER_WORD,
1037                );
1038            }
1039            self.segment_allocated = false;
1040        }
1041    }
1042}
1043
// Blanket implementation: a mutable reference to any `Allocator` is itself an
// `Allocator` that forwards both methods. This is what allows passing
// `&mut some_allocator` to `message::Builder::new()` so the allocator can be
// reused after the builder is dropped.
//
// SAFETY: every call is forwarded unchanged to `A`, so the trait's guarantees
// (zeroed segments, etc.) hold exactly when `A`'s implementation upholds them.
unsafe impl<A> Allocator for &'_ mut A
where
    A: Allocator,
{
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        // `*self` has type `&mut A`, so method resolution selects `A`'s own
        // `allocate_segment` rather than recursing into this impl.
        (*self).allocate_segment(minimum_size)
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        // Forwarded verbatim; the caller's safety obligations are identical to
        // those of `A::deallocate_segment`.
        (*self).deallocate_segment(ptr, word_size, words_used)
    }
}