use crate::any_pointer;
use crate::private::arena::{BuilderArena, BuilderArenaImpl};
use crate::private::arena::{ReaderArena, ReaderArenaImpl};
use crate::private::layout;
use crate::private::units::BYTES_PER_WORD;
use crate::traits::{FromPointerBuilder, SetterInput};
use crate::traits::{FromPointerReader, Owned};
use crate::OutputSegments;
use crate::Result;

/// Options controlling how data is read.
#[derive(Clone, Copy, Debug)]
pub struct ReaderOptions {
    /// Limits how many total (8-byte) words of data are allowed to be traversed.
    /// Because a message can use pointers to make the same data readable from
    /// many places, a small message can be made to appear very large to a naive
    /// reader; this limit defends against such amplification attacks.
    /// `None` means no limit.
    pub traversal_limit_in_words: Option<usize>,

    /// Limits how deeply nested a message structure can be, guarding against
    /// stack overflow in recursive traversals.
    pub nesting_limit: i32,
}

#[cfg(not(target_pointer_width = "16"))]
pub const DEFAULT_READER_OPTIONS: ReaderOptions = ReaderOptions {
    traversal_limit_in_words: Some(8 * 1024 * 1024),
    nesting_limit: 64,
};

#[cfg(target_pointer_width = "16")]
pub const DEFAULT_READER_OPTIONS: ReaderOptions = ReaderOptions {
    traversal_limit_in_words: Some(8 * 1024),
    nesting_limit: 64,
};

impl Default for ReaderOptions {
    fn default() -> Self {
        DEFAULT_READER_OPTIONS
    }
}

impl ReaderOptions {
    pub fn new() -> Self {
        DEFAULT_READER_OPTIONS
    }

    pub fn nesting_limit(&mut self, value: i32) -> &mut Self {
        self.nesting_limit = value;
        self
    }

    pub fn traversal_limit_in_words(&mut self, value: Option<usize>) -> &mut Self {
        self.traversal_limit_in_words = value;
        self
    }
}
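
// A minimal sketch of the chainable setters above; the limit values used here
// are arbitrary examples, not recommendations.
#[test]
fn reader_options_setters_chain() {
    let mut options = ReaderOptions::new();
    options
        .nesting_limit(16)
        .traversal_limit_in_words(Some(4 * 1024));
    assert_eq!(options.nesting_limit, 16);
    assert_eq!(options.traversal_limit_in_words, Some(4 * 1024));
}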

/// An object that manages the segments of a message being read.
pub trait ReaderSegments {
    /// Gets the segment with index `idx`, or `None` if no such segment exists.
    /// Segment indices are contiguous, starting at zero.
    fn get_segment(&self, idx: u32) -> Option<&[u8]>;

    /// Returns the number of segments. The default implementation probes
    /// `get_segment()` until it returns `None`, so it runs in O(n) time;
    /// implementors may want to override it.
    fn len(&self) -> usize {
        for i in 0.. {
            if self.get_segment(i as u32).is_none() {
                return i;
            }
        }
        unreachable!()
    }

    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

impl<S> ReaderSegments for &S
where
    S: ReaderSegments + ?Sized,
{
    fn get_segment(&self, idx: u32) -> Option<&[u8]> {
        (**self).get_segment(idx)
    }

    fn len(&self) -> usize {
        (**self).len()
    }

    fn is_empty(&self) -> bool {
        (**self).is_empty()
    }
}

/// An array of segments, suitable for use as the backing store of a `Reader`.
pub struct SegmentArray<'a> {
    segments: &'a [&'a [u8]],
}

impl<'a> SegmentArray<'a> {
    pub fn new(segments: &'a [&'a [u8]]) -> SegmentArray<'a> {
        SegmentArray { segments }
    }
}

impl ReaderSegments for SegmentArray<'_> {
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.segments.get(id as usize).copied()
    }

    fn len(&self) -> usize {
        self.segments.len()
    }
}
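
// A small sketch of `SegmentArray`: wrap borrowed segments and look them up
// by index. The segment contents here are arbitrary.
#[test]
fn segment_array_lookup() {
    let seg0 = [0u8; 8];
    let seg1 = [1u8; 16];
    let segments: &[&[u8]] = &[&seg0[..], &seg1[..]];
    let array = SegmentArray::new(segments);
    assert_eq!(array.len(), 2);
    assert_eq!(array.get_segment(1).map(<[u8]>::len), Some(16));
    assert!(array.get_segment(2).is_none());
}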

impl<I> ReaderSegments for [I]
where
    I: AsRef<[u8]>,
{
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.get(id as usize).map(|i| i.as_ref())
    }

    fn len(&self) -> usize {
        // Resolves to the inherent `<[I]>::len()`, not to this trait method.
        self.len()
    }

    fn is_empty(&self) -> bool {
        self.is_empty()
    }
}

#[cfg(feature = "alloc")]
impl<I> ReaderSegments for alloc::vec::Vec<I>
where
    I: AsRef<[u8]>,
{
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.get(id as usize).map(|i| i.as_ref())
    }

    fn len(&self) -> usize {
        // Resolves to the inherent `Vec::len()`, not to this trait method.
        self.len()
    }

    fn is_empty(&self) -> bool {
        self.is_empty()
    }
}
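
// Sketch: a `Vec` of owned byte buffers can serve directly as `ReaderSegments`
// via the impl above. Fully qualified calls are used to make the trait methods
// unambiguous with the inherent `Vec` methods.
#[cfg(feature = "alloc")]
#[test]
fn vec_as_reader_segments() {
    let segments: alloc::vec::Vec<alloc::vec::Vec<u8>> =
        alloc::vec![alloc::vec![0u8; 8], alloc::vec![1u8; 16]];
    assert_eq!(ReaderSegments::len(&segments), 2);
    assert_eq!(
        ReaderSegments::get_segment(&segments, 1).map(<[u8]>::len),
        Some(16)
    );
}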

/// A container used to read a message.
pub struct Reader<S>
where
    S: ReaderSegments,
{
    arena: ReaderArenaImpl<S>,
}

impl<S> Reader<S>
where
    S: ReaderSegments,
{
    pub fn new(segments: S, options: ReaderOptions) -> Self {
        Self {
            arena: ReaderArenaImpl::new(segments, options),
        }
    }

    fn get_root_internal(&self) -> Result<any_pointer::Reader<'_>> {
        let (segment_start, _seg_len) = self.arena.get_segment(0)?;
        let pointer_reader = unsafe {
            layout::PointerReader::get_root(
                &self.arena,
                0,
                segment_start,
                self.arena.nesting_limit(),
            )
        }?;
        Ok(any_pointer::Reader::new(pointer_reader))
    }

    /// Gets the root of the message, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        self.get_root_internal()?.get_as()
    }

    pub fn into_segments(self) -> S {
        self.arena.into_segments()
    }

    /// Checks whether the message is in canonical form.
    pub fn is_canonical(&self) -> Result<bool> {
        let (segment_start, seg_len) = self.arena.get_segment(0)?;

        if self.arena.get_segment(1).is_ok() {
            // A canonical message cannot have more than one segment.
            return Ok(false);
        }

        let pointer_reader = unsafe {
            layout::PointerReader::get_root(
                &self.arena,
                0,
                segment_start,
                self.arena.nesting_limit(),
            )
        }?;
        let read_head = ::core::cell::Cell::new(unsafe { segment_start.add(BYTES_PER_WORD) });
        let root_is_canonical = pointer_reader.is_canonical(&read_head)?;
        let all_words_consumed = (read_head.get() as usize - segment_start as usize)
            / BYTES_PER_WORD
            == seg_len as usize;
        Ok(root_is_canonical && all_words_consumed)
    }

    /// Returns a canonicalized version of the message's root, as a flat
    /// vector of words.
    #[cfg(feature = "alloc")]
    pub fn canonicalize(&self) -> Result<alloc::vec::Vec<crate::Word>> {
        let root = self.get_root_internal()?;
        let size = root.target_size()?.word_count + 1;
        let mut message = Builder::new(HeapAllocator::new().first_segment_words(size as u32));
        message.set_root_canonical(root)?;
        let output_segments = message.get_segments_for_output();
        assert_eq!(1, output_segments.len());
        let output = output_segments[0];
        assert!((output.len() / BYTES_PER_WORD) as u64 <= size);
        let mut result = crate::Word::allocate_zeroed_vec(output.len() / BYTES_PER_WORD);
        crate::Word::words_to_bytes_mut(&mut result[..]).copy_from_slice(output);
        Ok(result)
    }

    pub fn into_typed<T: Owned>(self) -> TypedReader<S, T> {
        TypedReader::new(self)
    }

    pub fn size_in_words(&self) -> usize {
        self.arena.size_in_words()
    }

    /// Returns a reference to the underlying segments.
    pub fn get_segments(&self) -> &S {
        self.arena.get_segments()
    }
}
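
// Sketch: a `Reader` over a single zeroed segment. A zero word is a null root
// pointer, so this is a minimal well-formed message. The crate-level `word()`
// helper is used so that the buffer is 8-byte aligned.
#[test]
fn reader_over_zeroed_segment() {
    let words = [crate::word(0, 0, 0, 0, 0, 0, 0, 0)];
    let segments: &[&[u8]] = &[crate::Word::words_to_bytes(&words)];
    let reader = Reader::new(SegmentArray::new(segments), ReaderOptions::new());
    assert_eq!(reader.size_in_words(), 1);
}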

/// A message reader whose root value is known to be of type `T`.
pub struct TypedReader<S, T>
where
    S: ReaderSegments,
    T: Owned,
{
    marker: ::core::marker::PhantomData<T>,
    message: Reader<S>,
}

impl<S, T> TypedReader<S, T>
where
    S: ReaderSegments,
    T: Owned,
{
    pub fn new(message: Reader<S>) -> Self {
        Self {
            marker: ::core::marker::PhantomData,
            message,
        }
    }

    pub fn get(&self) -> Result<T::Reader<'_>> {
        self.message.get_root()
    }

    pub fn into_inner(self) -> Reader<S> {
        self.message
    }

    /// Returns a reference to the underlying segments.
    pub fn get_segments(&self) -> &S {
        self.message.get_segments()
    }
}

impl<S, T> From<Reader<S>> for TypedReader<S, T>
where
    S: ReaderSegments,
    T: Owned,
{
    fn from(message: Reader<S>) -> Self {
        Self::new(message)
    }
}

impl<A, T> From<Builder<A>> for TypedReader<Builder<A>, T>
where
    A: Allocator,
    T: Owned,
{
    fn from(message: Builder<A>) -> Self {
        let reader = message.into_reader();
        reader.into_typed()
    }
}

impl<A, T> From<TypedBuilder<T, A>> for TypedReader<Builder<A>, T>
where
    A: Allocator,
    T: Owned,
{
    fn from(builder: TypedBuilder<T, A>) -> Self {
        builder.into_reader()
    }
}

/// An object that allocates memory for a Cap'n Proto message as it is being
/// built.
///
/// # Safety
///
/// An implementation must ensure that `allocate_segment(n)` returns a pointer
/// to at least `n` words of zeroed, 8-byte-aligned memory that remains valid
/// until it is passed back to `deallocate_segment()`.
pub unsafe trait Allocator {
    /// Allocates zeroed memory for a new segment, returning a pointer to the
    /// start of the segment and the segment's size in words. The returned
    /// size must be at least `minimum_size`.
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32);

    /// Indicates that a segment, previously returned from `allocate_segment()`,
    /// is no longer in use. `word_size` is the size that was returned from
    /// `allocate_segment()`, and `words_used` is the number of leading words
    /// that were actually written.
    ///
    /// # Safety
    ///
    /// The pointer and sizes passed in must come from a previous call to
    /// `allocate_segment()` on the same object.
    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32);
}
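
// A sketch (hypothetical, test-only) of a delegating allocator: it forwards to
// `HeapAllocator` and counts allocations. The safety contract is upheld
// because allocation and deallocation are forwarded unchanged.
#[cfg(all(test, feature = "alloc"))]
struct CountingAllocator {
    inner: HeapAllocator,
    segments_allocated: usize,
}

#[cfg(all(test, feature = "alloc"))]
unsafe impl Allocator for CountingAllocator {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        self.segments_allocated += 1;
        self.inner.allocate_segment(minimum_size)
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        unsafe { self.inner.deallocate_segment(ptr, word_size, words_used) }
    }
}

#[cfg(all(test, feature = "alloc"))]
#[test]
fn counting_allocator_counts() {
    let mut message = Builder::new(CountingAllocator {
        inner: HeapAllocator::new(),
        segments_allocated: 0,
    });
    // Allocating the root pointer forces the first segment allocation.
    let _ = message.get_root_internal();
    assert_eq!(message.into_allocator().segments_allocated, 1);
}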

/// A container used to build a message.
pub struct Builder<A>
where
    A: Allocator,
{
    arena: BuilderArenaImpl<A>,
}

fn _assert_kinds() {
    fn _assert_send<T: Send>() {}
    fn _assert_sync<T: Sync>() {}
    fn _assert_reader<S: ReaderSegments + Send>() {
        _assert_send::<Reader<S>>();
    }
    fn _assert_builder_send<A: Allocator + Send>() {
        _assert_send::<Builder<A>>();
    }
    fn _assert_builder_sync<A: Allocator + Sync>() {
        _assert_sync::<Builder<A>>();
    }
}

impl<A> Builder<A>
where
    A: Allocator,
{
    pub fn new(allocator: A) -> Self {
        Self {
            arena: BuilderArenaImpl::new(allocator),
        }
    }

    fn get_root_internal(&mut self) -> any_pointer::Builder<'_> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let location: *mut u8 = seg_start;
        let Self { arena } = self;

        any_pointer::Builder::new(layout::PointerBuilder::get_root(arena, 0, location))
    }

    /// Initializes the root as a value of the given type.
    pub fn init_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> T {
        let root = self.get_root_internal();
        root.init_as()
    }

    /// Initializes the root as a value of the given list type, with the given
    /// length.
    pub fn initn_root<'a, T: FromPointerBuilder<'a>>(&'a mut self, length: u32) -> T {
        let root = self.get_root_internal();
        root.initn_as(length)
    }

    /// Gets the root, interpreting it as the given type.
    pub fn get_root<'a, T: FromPointerBuilder<'a>>(&'a mut self) -> Result<T> {
        let root = self.get_root_internal();
        root.get_as()
    }

    pub fn get_root_as_reader<'a, T: FromPointerReader<'a>>(&'a self) -> Result<T> {
        if self.arena.is_empty() {
            any_pointer::Reader::new(layout::PointerReader::new_default()).get_as()
        } else {
            let (segment_start, _segment_len) = self.arena.get_segment(0)?;
            let pointer_reader = unsafe {
                layout::PointerReader::get_root(
                    self.arena.as_reader(),
                    0,
                    segment_start,
                    0x7fffffff,
                )
            }?;
            let root = any_pointer::Reader::new(pointer_reader);
            root.get_as()
        }
    }

    /// Sets the root to a deep copy of the given value.
    pub fn set_root<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        let mut root = self.get_root_internal();
        root.set_as(value)
    }

    /// Sets the root to a canonicalized version of `value`. If this was the
    /// first action taken on this `Builder`, then a subsequent call to
    /// `get_segments_for_output()` should return a single segment containing
    /// the full canonicalized message.
    pub fn set_root_canonical<T: Owned>(&mut self, value: impl SetterInput<T>) -> Result<()> {
        if self.arena.is_empty() {
            self.arena
                .allocate_segment(1)
                .expect("allocate root pointer");
            self.arena.allocate(0, 1).expect("allocate root pointer");
        }
        let (seg_start, _seg_len) = self.arena.get_segment_mut(0);
        let pointer = layout::PointerBuilder::get_root(&mut self.arena, 0, seg_start);
        SetterInput::set_pointer_builder(pointer, value, true)?;
        assert_eq!(self.get_segments_for_output().len(), 1);
        Ok(())
    }

    pub fn get_segments_for_output(&self) -> OutputSegments<'_> {
        self.arena.get_segments_for_output()
    }

    pub fn into_reader(self) -> Reader<Self> {
        Reader::new(
            self,
            ReaderOptions {
                traversal_limit_in_words: None,
                nesting_limit: i32::MAX,
            },
        )
    }

    pub fn into_typed<T: Owned>(self) -> TypedBuilder<T, A> {
        TypedBuilder::new(self)
    }

    /// Retrieves the underlying `Allocator`, deallocating all
    /// currently-allocated segments.
    pub fn into_allocator(self) -> A {
        self.arena.into_allocator()
    }

    pub fn size_in_words(&self) -> usize {
        self.arena.size_in_words()
    }
}

impl<A> ReaderSegments for Builder<A>
where
    A: Allocator,
{
    fn get_segment(&self, id: u32) -> Option<&[u8]> {
        self.get_segments_for_output().get(id as usize).copied()
    }

    fn len(&self) -> usize {
        self.get_segments_for_output().len()
    }
}
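
// Sketch: because `Builder` implements `ReaderSegments` (impl above), a built
// message can be re-read in place, without serializing. `any_pointer` is used
// here so that no generated schema code is needed.
#[cfg(feature = "alloc")]
#[test]
fn builder_reads_back_in_place() {
    let mut message = Builder::new_default();
    let _root: any_pointer::Builder<'_> = message.init_root();
    let reader = message.into_reader();
    // Only the root pointer word has been allocated.
    assert_eq!(reader.size_in_words(), 1);
}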

/// A message builder whose root is known to be of type `T`.
#[cfg(feature = "alloc")]
pub struct TypedBuilder<T, A = HeapAllocator>
where
    T: Owned,
    A: Allocator,
{
    marker: ::core::marker::PhantomData<T>,
    message: Builder<A>,
}

/// A message builder whose root is known to be of type `T`.
#[cfg(not(feature = "alloc"))]
pub struct TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    marker: ::core::marker::PhantomData<T>,
    message: Builder<A>,
}

#[cfg(feature = "alloc")]
impl<T> TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    pub fn new_default() -> Self {
        Default::default()
    }
}

#[cfg(feature = "alloc")]
impl<T> Default for TypedBuilder<T, HeapAllocator>
where
    T: Owned,
{
    fn default() -> Self {
        Self::new(Builder::default())
    }
}

impl<T, A> TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    pub fn new(message: Builder<A>) -> Self {
        Self {
            marker: ::core::marker::PhantomData,
            message,
        }
    }

    pub fn init_root(&mut self) -> T::Builder<'_> {
        self.message.init_root()
    }

    pub fn initn_root(&mut self, length: u32) -> T::Builder<'_> {
        self.message.initn_root(length)
    }

    pub fn get_root(&mut self) -> Result<T::Builder<'_>> {
        self.message.get_root()
    }

    pub fn get_root_as_reader(&self) -> Result<T::Reader<'_>> {
        self.message.get_root_as_reader()
    }

    pub fn set_root(&mut self, value: T::Reader<'_>) -> Result<()> {
        self.message.set_root(value)
    }

    pub fn into_inner(self) -> Builder<A> {
        self.message
    }

    pub fn borrow_inner(&self) -> &Builder<A> {
        &self.message
    }

    pub fn borrow_inner_mut(&mut self) -> &mut Builder<A> {
        &mut self.message
    }

    pub fn into_reader(self) -> TypedReader<Builder<A>, T> {
        TypedReader::new(self.message.into_reader())
    }
}
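
// Sketch (hypothetical generated type `point_capnp::point`): with a
// `TypedBuilder`, the root type is fixed at the type level, so no turbofish
// is needed at each call site:
//
//     let mut message = TypedBuilder::<point_capnp::point::Owned>::new_default();
//     let mut root = message.init_root();
//     root.set_x(1.0);
//     let reader = message.into_reader();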

impl<T, A> From<Builder<A>> for TypedBuilder<T, A>
where
    T: Owned,
    A: Allocator,
{
    fn from(builder: Builder<A>) -> Self {
        Self::new(builder)
    }
}

/// An `Allocator` whose segments are allocated from the heap with
/// `alloc::alloc::alloc_zeroed()`.
#[derive(Debug)]
#[cfg(feature = "alloc")]
pub struct HeapAllocator {
    /// Size, in words, of the next segment to be allocated.
    next_size: u32,

    /// How `next_size` should be updated after each allocation.
    allocation_strategy: AllocationStrategy,

    /// Maximum size, in words, of any single allocation.
    max_segment_words: u32,
}

#[derive(Clone, Copy, Debug)]
pub enum AllocationStrategy {
    /// Allocates the same number of words for each segment, to the extent
    /// possible (a larger segment is still allocated when one is requested).
    FixedSize,

    /// Increases the allocation size for each new segment, roughly doubling
    /// the message's total capacity each time, up to the maximum.
    GrowHeuristically,
}

pub const SUGGESTED_FIRST_SEGMENT_WORDS: u32 = 1024;
pub const SUGGESTED_ALLOCATION_STRATEGY: AllocationStrategy = AllocationStrategy::GrowHeuristically;

#[cfg(feature = "alloc")]
impl Default for HeapAllocator {
    fn default() -> Self {
        Self {
            next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
            allocation_strategy: SUGGESTED_ALLOCATION_STRATEGY,
            max_segment_words: 1 << 29,
        }
    }
}

#[cfg(feature = "alloc")]
impl HeapAllocator {
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the size, in words (8 bytes each), of the first segment to be
    /// allocated.
    pub fn first_segment_words(mut self, value: u32) -> Self {
        assert!(value <= self.max_segment_words);
        self.next_size = value;
        self
    }

    /// Sets the strategy used to decide the sizes of segments after the
    /// first one.
    pub fn allocation_strategy(mut self, value: AllocationStrategy) -> Self {
        self.allocation_strategy = value;
        self
    }

    /// Sets the maximum number of words allowed in a single allocation.
    pub fn max_segment_words(mut self, value: u32) -> Self {
        assert!(self.next_size <= value);
        self.max_segment_words = value;
        self
    }
}

#[cfg(feature = "alloc")]
unsafe impl Allocator for HeapAllocator {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        let size = core::cmp::max(minimum_size, self.next_size);
        let layout =
            alloc::alloc::Layout::from_size_align(size as usize * BYTES_PER_WORD, 8).unwrap();
        let ptr = unsafe { alloc::alloc::alloc_zeroed(layout) };
        if ptr.is_null() {
            alloc::alloc::handle_alloc_error(layout);
        }
        match self.allocation_strategy {
            AllocationStrategy::GrowHeuristically => {
                if size < self.max_segment_words - self.next_size {
                    self.next_size += size;
                } else {
                    self.next_size = self.max_segment_words;
                }
            }
            AllocationStrategy::FixedSize => {}
        }
        (ptr, size)
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, _words_used: u32) {
        unsafe {
            alloc::alloc::dealloc(
                ptr,
                alloc::alloc::Layout::from_size_align(word_size as usize * BYTES_PER_WORD, 8)
                    .unwrap(),
            );
        }
        self.next_size = SUGGESTED_FIRST_SEGMENT_WORDS;
    }
}
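
// Sketch of `GrowHeuristically`: each allocation increases the size hint for
// the next one (roughly doubling), until `max_segment_words` is reached.
#[cfg(feature = "alloc")]
#[test]
fn test_grow_heuristically() {
    let mut allocator = HeapAllocator::new().first_segment_words(1024);
    let (a1, s1) = allocator.allocate_segment(1024);
    let (a2, s2) = allocator.allocate_segment(1024);
    assert_eq!(s1, 1024);
    // `next_size` grew by the size of the first allocation: 1024 + 1024.
    assert_eq!(s2, 2048);
    unsafe {
        allocator.deallocate_segment(a1, s1, 0);
        allocator.deallocate_segment(a2, s2, 0);
    }
}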

#[cfg(feature = "alloc")]
#[test]
fn test_allocate_max() {
    let allocation_size = 1 << 24;
    let mut allocator = HeapAllocator::new()
        .max_segment_words((1 << 25) - 1)
        .first_segment_words(allocation_size);

    let (a1, s1) = allocator.allocate_segment(allocation_size);
    let (a2, s2) = allocator.allocate_segment(allocation_size);
    let (a3, s3) = allocator.allocate_segment(allocation_size);

    assert_eq!(s1, allocation_size);

    // After the first allocation, `next_size` was clamped to
    // `max_segment_words`, so subsequent segments get the maximum size.
    assert_eq!(s2, allocator.max_segment_words);
    assert_eq!(s3, allocator.max_segment_words);

    unsafe {
        allocator.deallocate_segment(a1, s1, 0);
        allocator.deallocate_segment(a2, s2, 0);
        allocator.deallocate_segment(a3, s3, 0);
    }
}

#[cfg(feature = "alloc")]
impl Builder<HeapAllocator> {
    /// Constructs a new `Builder<HeapAllocator>` whose first segment has
    /// length `SUGGESTED_FIRST_SEGMENT_WORDS`.
    pub fn new_default() -> Self {
        Default::default()
    }
}

#[cfg(feature = "alloc")]
impl Default for Builder<HeapAllocator> {
    fn default() -> Self {
        Self::new(HeapAllocator::new())
    }
}

/// An `Allocator` whose first segment is a caller-provided buffer, the
/// "scratch space". Allocation falls back to the heap once the scratch space
/// has been used.
#[cfg(feature = "alloc")]
pub struct ScratchSpaceHeapAllocator<'a> {
    scratch_space: &'a mut [u8],
    scratch_space_allocated: bool,
    allocator: HeapAllocator,
}

#[cfg(feature = "alloc")]
impl<'a> ScratchSpaceHeapAllocator<'a> {
    /// Writes zeroes into the entire buffer and constructs a new allocator
    /// from it. If the buffer is large, this zeroing can be relatively
    /// expensive, so consider reusing the same allocator for multiple
    /// messages.
    pub fn new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a> {
        #[cfg(not(feature = "unaligned"))]
        {
            if scratch_space.as_ptr() as usize % BYTES_PER_WORD != 0 {
                panic!(
                    "Scratch space must be 8-byte aligned, or you must enable the \"unaligned\" \
                     feature in the capnp crate"
                );
            }
        }

        // Zero the scratch space, as the `Allocator` contract requires.
        for b in &mut scratch_space[..] {
            *b = 0;
        }
        ScratchSpaceHeapAllocator {
            scratch_space,
            scratch_space_allocated: false,
            allocator: HeapAllocator::new(),
        }
    }

    /// Sets the size, in words, of the second segment to be allocated (the
    /// first segment is the scratch space itself).
    pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.first_segment_words(value),
            ..self
        }
    }

    /// Sets the strategy used to decide the sizes of segments after the
    /// second one.
    pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a> {
        ScratchSpaceHeapAllocator {
            allocator: self.allocator.allocation_strategy(value),
            ..self
        }
    }
}

#[cfg(feature = "alloc")]
unsafe impl Allocator for ScratchSpaceHeapAllocator<'_> {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        if (minimum_size as usize) < (self.scratch_space.len() / BYTES_PER_WORD)
            && !self.scratch_space_allocated
        {
            self.scratch_space_allocated = true;
            (
                self.scratch_space.as_mut_ptr(),
                (self.scratch_space.len() / BYTES_PER_WORD) as u32,
            )
        } else {
            self.allocator.allocate_segment(minimum_size)
        }
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        let seg_ptr = self.scratch_space.as_mut_ptr();
        if ptr == seg_ptr {
            // Rezero the used part of the scratch space, so that it is ready
            // for the next allocation.
            unsafe {
                core::ptr::write_bytes(seg_ptr, 0u8, (words_used as usize) * BYTES_PER_WORD);
            }
            self.scratch_space_allocated = false;
        } else {
            self.allocator
                .deallocate_segment(ptr, word_size, words_used);
        }
    }
}
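
// Sketch: a message whose first segment lives in caller-provided scratch
// space. `Word` is 8-byte aligned, so the buffer passes the alignment check.
#[cfg(feature = "alloc")]
#[test]
fn scratch_space_first_segment() {
    let mut words = [crate::word(0, 0, 0, 0, 0, 0, 0, 0); 16];
    let scratch = crate::Word::words_to_bytes_mut(&mut words);
    let mut message = Builder::new(ScratchSpaceHeapAllocator::new(scratch));
    let _root: any_pointer::Builder<'_> = message.init_root();
    // The single allocated word fits in the scratch space; no heap fallback.
    assert_eq!(message.size_in_words(), 1);
}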

/// An allocator that holds a single segment, provided by the caller. It
/// cannot grow: attempting to allocate a second segment panics. This can be
/// useful in contexts (e.g. no-alloc builds) where heap allocation is
/// unavailable or undesirable.
pub struct SingleSegmentAllocator<'a> {
    segment: &'a mut [u8],
    segment_allocated: bool,
}

impl<'a> SingleSegmentAllocator<'a> {
    /// Writes zeroes into the entire segment and constructs a new allocator
    /// from it.
    pub fn new(segment: &'a mut [u8]) -> SingleSegmentAllocator<'a> {
        #[cfg(not(feature = "unaligned"))]
        {
            if segment.as_ptr() as usize % BYTES_PER_WORD != 0 {
                panic!(
                    "Segment must be 8-byte aligned, or you must enable the \"unaligned\" \
                     feature in the capnp crate"
                );
            }
        }

        // Zero the segment, as the `Allocator` contract requires.
        for b in &mut segment[..] {
            *b = 0;
        }
        SingleSegmentAllocator {
            segment,
            segment_allocated: false,
        }
    }
}

unsafe impl Allocator for SingleSegmentAllocator<'_> {
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        let available_word_count = self.segment.len() / BYTES_PER_WORD;
        if (minimum_size as usize) > available_word_count {
            panic!(
                "Allocation too large: asked for {minimum_size} words, \
                 but only {available_word_count} are available."
            )
        } else if self.segment_allocated {
            panic!("Tried to allocate two segments in a SingleSegmentAllocator.")
        } else {
            self.segment_allocated = true;
            (
                self.segment.as_mut_ptr(),
                (self.segment.len() / BYTES_PER_WORD) as u32,
            )
        }
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, _word_size: u32, words_used: u32) {
        let seg_ptr = self.segment.as_mut_ptr();
        if ptr == seg_ptr {
            // Rezero the used part of the segment, so that it is ready for
            // the next allocation.
            unsafe {
                core::ptr::write_bytes(seg_ptr, 0u8, (words_used as usize) * BYTES_PER_WORD);
            }
            self.segment_allocated = false;
        }
    }
}
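
// Sketch: building a message entirely inside one caller-provided segment,
// aligned via `Word`. A second segment request would panic.
#[test]
fn single_segment_message() {
    let mut words = [crate::word(0, 0, 0, 0, 0, 0, 0, 0); 8];
    let segment = crate::Word::words_to_bytes_mut(&mut words);
    let mut message = Builder::new(SingleSegmentAllocator::new(segment));
    let _root: any_pointer::Builder<'_> = message.init_root();
    assert_eq!(message.get_segments_for_output().len(), 1);
}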

unsafe impl<A> Allocator for &'_ mut A
where
    A: Allocator,
{
    fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
        (*self).allocate_segment(minimum_size)
    }

    unsafe fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
        (*self).deallocate_segment(ptr, word_size, words_used)
    }
}
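
// Sketch: `&mut A` is itself an `Allocator` (impl above), so one allocator can
// be reused across successive builders without giving up ownership.
#[cfg(feature = "alloc")]
#[test]
fn allocator_reused_by_mut_ref() {
    let mut allocator = HeapAllocator::new();
    for _ in 0..2 {
        let mut message = Builder::new(&mut allocator);
        let _root: any_pointer::Builder<'_> = message.init_root();
    }
}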