1use core::cell::Cell;
23use core::mem;
24use core::ptr;
25
26use crate::data;
27use crate::private::arena::{BuilderArena, NullArena, ReaderArena, SegmentId};
28#[cfg(feature = "alloc")]
29use crate::private::capability::ClientHook;
30use crate::private::mask::Mask;
31use crate::private::primitive::{Primitive, WireValue};
32use crate::private::units::*;
33use crate::private::zero;
34use crate::text;
35use crate::{Error, ErrorKind, MessageSize, Result};
36
37pub use self::ElementSize::{
38 Bit, Byte, EightBytes, FourBytes, InlineComposite, Pointer, TwoBytes, Void,
39};
40
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ElementSize {
    Void = 0,
    Bit = 1,
    Byte = 2,
    TwoBytes = 3,
    FourBytes = 4,
    EightBytes = 5,
    Pointer = 6,
    InlineComposite = 7,
}

impl ElementSize {
    /// Decodes the 3-bit element-size tag of a list pointer.
    ///
    /// Panics if `val` is not in `0..=7`.
    fn from(val: u8) -> Self {
        // Variants listed in wire order, indexed by their encoded value.
        const ALL: [ElementSize; 8] = [
            ElementSize::Void,
            ElementSize::Bit,
            ElementSize::Byte,
            ElementSize::TwoBytes,
            ElementSize::FourBytes,
            ElementSize::EightBytes,
            ElementSize::Pointer,
            ElementSize::InlineComposite,
        ];
        match ALL.get(usize::from(val)) {
            Some(&size) => size,
            None => panic!("illegal element size: {val}"),
        }
    }
}
69
70pub fn data_bits_per_element(size: ElementSize) -> BitCount32 {
71 match size {
72 Void => 0,
73 Bit => 1,
74 Byte => 8,
75 TwoBytes => 16,
76 FourBytes => 32,
77 EightBytes => 64,
78 Pointer => 0,
79 InlineComposite => 0,
80 }
81}
82
83pub fn pointers_per_element(size: ElementSize) -> WirePointerCount32 {
84 match size {
85 Pointer => 1,
86 _ => 0,
87 }
88}
89
90#[derive(Clone, Copy, Debug)]
91pub struct StructSize {
92 pub data: WordCount16,
93 pub pointers: WirePointerCount16,
94}
95
96impl StructSize {
97 pub fn total(&self) -> WordCount32 {
98 u32::from(self.data) + u32::from(self.pointers) * WORDS_PER_POINTER as WordCount32
99 }
100}
101
/// Kind of a wire pointer, stored in the low 2 bits of its first word.
#[repr(u8)]
#[derive(Clone, Copy, PartialEq)]
pub enum WirePointerKind {
    Struct = 0,
    List = 1,
    Far = 2,
    Other = 3,
}

/// High-level classification of what a pointer field refers to.
pub enum PointerType {
    Null,
    Struct,
    List,
    Capability,
}

impl WirePointerKind {
    /// Decodes the 2-bit pointer-kind tag.
    ///
    /// Panics if `val` is not in `0..=3`; callers typically mask with `& 3`
    /// first, making the panic unreachable in practice.
    fn from(val: u8) -> Self {
        match val {
            0 => Self::Struct,
            1 => Self::List,
            2 => Self::Far,
            3 => Self::Other,
            // Fixed: this message previously said "illegal element size",
            // copied from `ElementSize::from`, which was misleading here.
            _ => panic!("illegal wire pointer kind: {val}"),
        }
    }
}
129
/// A pointer as laid out on the wire: one 64-bit word split into two 32-bit
/// halves. The accessor methods on `impl WirePointer` interpret the fields
/// according to the pointer's kind.
#[repr(C)]
pub struct WirePointer {
    // Low half: 2-bit kind tag in the low bits; remaining bits are
    // kind-dependent (signed word offset, far position, or element count).
    offset_and_kind: WireValue<u32>,
    // High half: kind-dependent (struct section sizes, list element
    // size/count, far segment id, or capability index).
    upper32bits: WireValue<u32>,
}
135
// When the "unaligned" feature is enabled, `WirePointer` must impose no
// alignment requirement so that pointers can be read from arbitrary byte
// offsets. This test pins that invariant.
#[test]
#[cfg(feature = "unaligned")]
fn wire_pointer_align() {
    assert_eq!(core::mem::align_of::<WirePointer>(), 1);
}
142
impl WirePointer {
    /// Returns this pointer's kind, encoded in the low 2 bits of the low
    /// word.
    #[inline]
    pub fn kind(&self) -> WirePointerKind {
        WirePointerKind::from(self.offset_and_kind.get() as u8 & 3)
    }

    /// Returns true for Struct and List pointers (kinds 0 and 1), i.e.
    /// pointers whose offset is relative to their own position. Far (2) and
    /// Other (3) both have bit 1 set, so testing that bit suffices.
    #[inline]
    pub fn is_positional(&self) -> bool {
        (self.offset_and_kind.get() & 2) == 0
    }

    /// Returns true if this is a capability pointer: kind Other with every
    /// remaining bit of the low word zero (so the whole word equals 3).
    #[inline]
    pub fn is_capability(&self) -> bool {
        self.offset_and_kind.get() == WirePointerKind::Other as u32
    }

    /// Computes the target address of a positional pointer: the word right
    /// after the pointer, plus the signed word offset held in the upper 30
    /// bits (the `>> 2` on an `i32` sign-extends it); 8 bytes per word.
    ///
    /// # Safety
    /// `ptr` must be valid to read, and the computed address must stay
    /// within the same allocation.
    #[inline]
    pub unsafe fn target(ptr: *const Self) -> *const u8 {
        let this_addr: *const u8 = ptr as *const _;
        unsafe { this_addr.offset(8 * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize) }
    }

    /// Like `target()`, but validates the offset against the segment via
    /// `arena.check_offset` instead of blindly following it.
    #[inline]
    fn target_from_segment(
        ptr: *const Self,
        arena: &dyn ReaderArena,
        segment_id: u32,
    ) -> Result<*const u8> {
        let this_addr: *const u8 = ptr as *const _;
        unsafe {
            // Offset in words from this pointer to its target (signed).
            let offset = 1 + (((*ptr).offset_and_kind.get() as i32) >> 2);
            arena.check_offset(segment_id, this_addr, offset)
        }
    }

    /// Mutable-pointer counterpart of `target()`. Uses `wrapping_offset` so
    /// the address computation itself is defined even before validation.
    #[inline]
    fn mut_target(ptr: *mut Self) -> *mut u8 {
        let this_addr: *mut u8 = ptr as *mut _;
        unsafe {
            this_addr.wrapping_offset(
                BYTES_PER_WORD as isize
                    * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize,
            )
        }
    }

    /// Sets the kind tag and the signed word offset so that this pointer
    /// points at `target`. Both must lie in the same segment for the word
    /// distance to be meaningful.
    #[inline]
    pub fn set_kind_and_target(&mut self, kind: WirePointerKind, target: *mut u8) {
        let this_addr: isize = self as *const _ as isize;
        let target_addr: isize = target as *const _ as isize;
        self.offset_and_kind.set(
            ((((target_addr - this_addr) / BYTES_PER_WORD as isize) as i32 - 1) << 2) as u32
                | (kind as u32),
        )
    }

    /// Sets only the kind tag, leaving the offset bits zero (target is the
    /// word immediately following the pointer).
    #[inline]
    pub fn set_kind_with_zero_offset(&mut self, kind: WirePointerKind) {
        self.offset_and_kind.set(kind as u32)
    }

    /// Encodes a zero-sized struct: kind Struct with word offset -1
    /// (0xfffffffc = -1 << 2), which makes the pointer target itself so the
    /// struct consumes no space.
    #[inline]
    pub fn set_kind_and_target_for_empty_struct(&mut self) {
        self.offset_and_kind.set(0xfffffffc);
    }

    /// For an inline-composite list tag word: the element count occupies the
    /// bits where a pointer's offset would normally go.
    #[inline]
    pub fn inline_composite_list_element_count(&self) -> ElementCount32 {
        self.offset_and_kind.get() >> 2
    }

    /// Writes an inline-composite list tag word: kind plus element count in
    /// the offset field.
    #[inline]
    pub fn set_kind_and_inline_composite_list_element_count(
        &mut self,
        kind: WirePointerKind,
        element_count: ElementCount32,
    ) {
        self.offset_and_kind
            .set((element_count << 2) | (kind as u32))
    }

    /// For a far pointer: word position of the landing pad within the target
    /// segment (bits 3 and up of the low word).
    #[inline]
    pub fn far_position_in_segment(&self) -> WordCount32 {
        self.offset_and_kind.get() >> 3
    }

    /// For a far pointer: whether the landing pad is two words (a far
    /// pointer plus a tag word) rather than one. Stored in bit 2.
    #[inline]
    pub fn is_double_far(&self) -> bool {
        ((self.offset_and_kind.get() >> 2) & 1) != 0
    }

    /// Encodes a far pointer's low word: pad position, the double-far flag,
    /// and the Far kind tag.
    #[inline]
    pub fn set_far(&mut self, is_double_far: bool, pos: WordCount32) {
        self.offset_and_kind
            .set((pos << 3) | (u32::from(is_double_far) << 2) | WirePointerKind::Far as u32);
    }

    /// Encodes a capability pointer: kind Other in the low word, capability
    /// table index in the high word.
    #[inline]
    pub fn set_cap(&mut self, index: u32) {
        self.offset_and_kind.set(WirePointerKind::Other as u32);
        self.upper32bits.set(index);
    }

    /// For a struct pointer: size of the data section in words (low 16 bits
    /// of the high word).
    #[inline]
    pub fn struct_data_size(&self) -> WordCount16 {
        self.upper32bits.get() as WordCount16
    }

    /// For a struct pointer: number of pointers in the pointer section (high
    /// 16 bits of the high word).
    #[inline]
    pub fn struct_ptr_count(&self) -> WordCount16 {
        (self.upper32bits.get() >> 16) as WordCount16
    }

    /// Total size of the referenced struct in words: data section plus
    /// pointer section.
    #[inline]
    pub fn struct_word_size(&self) -> WordCount32 {
        u32::from(self.struct_data_size())
            + u32::from(self.struct_ptr_count()) * WORDS_PER_POINTER as u32
    }

    /// Writes a struct pointer's high word from a `StructSize`.
    #[inline]
    pub fn set_struct_size(&mut self, size: StructSize) {
        self.upper32bits
            .set(u32::from(size.data) | (u32::from(size.pointers) << 16))
    }

    /// Convenience form of `set_struct_size` taking the two halves directly.
    #[inline]
    pub fn set_struct_size_from_pieces(&mut self, ds: WordCount16, rc: WirePointerCount16) {
        self.set_struct_size(StructSize {
            data: ds,
            pointers: rc,
        })
    }

    /// For a list pointer: the element-size tag (low 3 bits of the high
    /// word).
    #[inline]
    pub fn list_element_size(&self) -> ElementSize {
        ElementSize::from(self.upper32bits.get() as u8 & 7)
    }

    /// For a list pointer: the element count (bits 3 and up of the high
    /// word). For inline-composite lists the same bits hold a word count —
    /// see `list_inline_composite_word_count`.
    #[inline]
    pub fn list_element_count(&self) -> ElementCount32 {
        self.upper32bits.get() >> 3
    }

    /// For an inline-composite list pointer: total content size in words,
    /// not counting the tag word. Shares its encoding with the element
    /// count field.
    #[inline]
    pub fn list_inline_composite_word_count(&self) -> WordCount32 {
        self.list_element_count()
    }

    /// Writes a list pointer's high word. Panics if `ec` does not fit the
    /// 29 bits available.
    #[inline]
    pub fn set_list_size_and_count(&mut self, es: ElementSize, ec: ElementCount32) {
        assert!(ec < (1 << 29), "Lists are limited to 2**29 elements");
        self.upper32bits.set((ec << 3) | (es as u32));
    }

    /// Writes an inline-composite list pointer's high word from a word
    /// count. Panics if `wc` does not fit the 29 bits available.
    #[inline]
    pub fn set_list_inline_composite(&mut self, wc: WordCount32) {
        assert!(
            wc < (1 << 29),
            "Inline composite lists are limited to 2**29 words"
        );
        self.upper32bits.set((wc << 3) | (InlineComposite as u32));
    }

    /// For a far pointer: id of the segment containing the landing pad (the
    /// entire high word).
    #[inline]
    pub fn far_segment_id(&self) -> SegmentId {
        self.upper32bits.get() as SegmentId
    }

    /// Sets the target segment id of a far pointer.
    #[inline]
    pub fn set_far_segment_id(&mut self, si: SegmentId) {
        self.upper32bits.set(si)
    }

    /// For a capability pointer: index into the capability table.
    #[inline]
    pub fn cap_index(&self) -> u32 {
        self.upper32bits.get()
    }

    /// Sets the capability table index of a capability pointer.
    #[inline]
    pub fn set_cap_index(&mut self, index: u32) {
        self.upper32bits.set(index)
    }

    /// Returns true if both halves are zero — the encoding of a null
    /// pointer.
    #[inline]
    pub fn is_null(&self) -> bool {
        self.offset_and_kind.get() == 0 && self.upper32bits.get() == 0
    }
}
344
345mod wire_helpers {
346 use core::{ptr, slice};
347
348 use crate::data;
349 use crate::private::arena::*;
350 #[cfg(feature = "alloc")]
351 use crate::private::capability::ClientHook;
352 use crate::private::layout::ElementSize::*;
353 use crate::private::layout::{data_bits_per_element, pointers_per_element};
354 use crate::private::layout::{CapTableBuilder, CapTableReader};
355 use crate::private::layout::{
356 ElementSize, ListBuilder, ListReader, StructBuilder, StructReader, StructSize, WirePointer,
357 WirePointerKind,
358 };
359 use crate::private::units::*;
360 use crate::text;
361 use crate::{Error, ErrorKind, MessageSize, Result};
362
    /// A value paired with the id of the segment it was allocated in.
    pub struct SegmentAnd<T> {
        #[allow(dead_code)]
        segment_id: u32,
        pub value: T,
    }
368
369 #[inline]
370 pub fn round_bytes_up_to_words(bytes: ByteCount32) -> WordCount32 {
371 (bytes + 7) / BYTES_PER_WORD as u32
373 }
374
375 #[inline]
380 pub fn round_bits_up_to_words(bits: BitCount64) -> WordCount32 {
381 ((bits + 63) / (BITS_PER_WORD as u64)) as WordCount32
383 }
384
385 #[allow(dead_code)]
386 #[inline]
387 pub fn round_bits_up_to_bytes(bits: BitCount64) -> ByteCount32 {
388 ((bits + 7) / (BITS_PER_BYTE as u64)) as ByteCount32
389 }
390
    /// Checks that the `size_in_words` words starting at `start` lie
    /// entirely within the given segment. The actual check is delegated to
    /// the arena; `_kind` is currently unused.
    #[inline]
    pub fn bounds_check(
        arena: &dyn ReaderArena,
        segment_id: u32,
        start: *const u8,
        size_in_words: usize,
        _kind: WirePointerKind,
    ) -> Result<()> {
        arena.contains_interval(segment_id, start, size_in_words)
    }
401
    /// Reports a logical read of `virtual_amount` words to the arena's
    /// `amplified_read` accounting (presumably feeding its read-limit
    /// enforcement — semantics live in the arena implementation).
    #[inline]
    pub fn amplified_read(arena: &dyn ReaderArena, virtual_amount: u64) -> Result<()> {
        arena.amplified_read(virtual_amount)
    }
406
407 #[inline]
408 pub unsafe fn copy_nonoverlapping_check_zero<T>(src: *const T, dst: *mut T, count: usize) {
409 if count > 0 {
410 ptr::copy_nonoverlapping(src, dst, count);
411 }
412 }
413
    /// Allocates `amount` words for a new object and points `reff` at it.
    /// If `reff` already references an object, that object is zeroed first.
    /// Returns `(content_ptr, tag_ptr, segment_id)`; `tag_ptr` differs from
    /// `reff` when the object had to be placed in another segment behind a
    /// far pointer (the tag then lives in the landing pad).
    #[inline]
    pub unsafe fn allocate(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        amount: WordCount32,
        kind: WirePointerKind,
    ) -> (*mut u8, *mut WirePointer, u32) {
        let is_null = (*reff).is_null();
        if !is_null {
            // Discard whatever the pointer previously referenced.
            zero_object(arena, segment_id, reff)
        }

        if amount == 0 && kind == WirePointerKind::Struct {
            // Zero-sized structs are encoded specially: the pointer targets
            // itself, so no space is allocated at all.
            (*reff).set_kind_and_target_for_empty_struct();
            return (reff as *mut _, reff, segment_id);
        }

        match arena.allocate(segment_id, amount) {
            None => {
                // No room in the requested segment: allocate elsewhere, with
                // one extra word for a landing-pad pointer placed right
                // before the object.
                let amount_plus_ref = amount + POINTER_SIZE_IN_WORDS as u32;
                let (segment_id, word_idx) = arena.allocate_anywhere(amount_plus_ref);
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = seg_start.offset(word_idx as isize * BYTES_PER_WORD as isize);

                // `reff` becomes a single-far pointer to the landing pad.
                (*reff).set_far(false, word_idx);
                (*reff).set_far_segment_id(segment_id);

                // The landing pad holds the real positional pointer.
                let reff = ptr as *mut WirePointer;

                let ptr1 = ptr.add(BYTES_PER_WORD);
                (*reff).set_kind_and_target(kind, ptr1);
                (ptr1, reff, segment_id)
            }
            Some(idx) => {
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = (seg_start).offset(idx as isize * BYTES_PER_WORD as isize);
                (*reff).set_kind_and_target(kind, ptr);
                (ptr, reff, segment_id)
            }
        }
    }
464
    /// Resolves a possibly-far pointer in a builder message, returning
    /// `(content_ptr, tag_ptr, segment_id)`: the object's contents, the
    /// pointer word that describes its size/type, and the segment holding
    /// the contents. Non-far pointers pass through unchanged.
    #[inline]
    pub unsafe fn follow_builder_fars(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        ref_target: *mut u8,
        segment_id: u32,
    ) -> Result<(*mut u8, *mut WirePointer, u32)> {
        if (*reff).kind() == WirePointerKind::Far {
            let segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
            let pad: *mut WirePointer =
                (seg_start as *mut WirePointer).offset((*reff).far_position_in_segment() as isize);
            if !(*reff).is_double_far() {
                // Single far: the landing pad is an ordinary pointer whose
                // target lives in the same segment as the pad.
                Ok((WirePointer::mut_target(pad), pad, segment_id))
            } else {
                // Double far: the pad is two words — word 0 is another far
                // pointer giving the object's location, word 1 is the tag
                // describing the object.
                let reff = pad.offset(1);

                let segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment_mut(segment_id);
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, reff, segment_id))
            }
        } else {
            Ok((ref_target, reff, segment_id))
        }
    }
503
    /// Reader-side counterpart of `follow_builder_fars`: resolves far (and
    /// double-far) pointers with bounds checking of the landing pad, and
    /// returns `(content_ptr, tag_ptr, segment_id)`. Non-far pointers are
    /// resolved to their validated target in the current segment.
    #[inline]
    pub unsafe fn follow_fars(
        arena: &dyn ReaderArena,
        reff: *const WirePointer,
        segment_id: u32,
    ) -> Result<(*const u8, *const WirePointer, u32)> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();

            let (seg_start, _seg_len) = arena.get_segment(far_segment_id)?;
            let ptr = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);

            // A double-far pad occupies two words, a single-far pad one.
            let pad_words: usize = if (*reff).is_double_far() { 2 } else { 1 };
            bounds_check(arena, far_segment_id, ptr, pad_words, WirePointerKind::Far)?;

            let pad: *const WirePointer = ptr as *const _;

            if !(*reff).is_double_far() {
                Ok((
                    WirePointer::target_from_segment(pad, arena, far_segment_id)?,
                    pad,
                    far_segment_id,
                ))
            } else {
                // Double far: pad word 0 points (far) at the content in yet
                // another segment; pad word 1 is the describing tag.
                let tag = pad.offset(1);
                let double_far_segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment(double_far_segment_id)?;
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, tag, double_far_segment_id))
            }
        } else {
            Ok((
                WirePointer::target_from_segment(reff, arena, segment_id)?,
                reff,
                segment_id,
            ))
        }
    }
551
    /// Zeroes out the object that `reff` points to, recursing into
    /// sub-objects first. Far pointers are followed and their landing pads
    /// zeroed as well. The pointer word `reff` itself is left untouched.
    pub unsafe fn zero_object(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        reff: *mut WirePointer,
    ) {
        match (*reff).kind() {
            WirePointerKind::Struct | WirePointerKind::List | WirePointerKind::Other => {
                zero_object_helper(arena, segment_id, reff, WirePointer::mut_target(reff))
            }
            WirePointerKind::Far => {
                let segment_id = (*reff).far_segment_id();
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let pad: *mut WirePointer = (seg_start as *mut WirePointer)
                    .offset((*reff).far_position_in_segment() as isize);

                if (*reff).is_double_far() {
                    // Two-word pad: word 0 is a far pointer to the content,
                    // word 1 is the tag describing it.
                    let segment_id = (*pad).far_segment_id();

                    let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                    let ptr = seg_start.offset(
                        (*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize,
                    );
                    zero_object_helper(arena, segment_id, pad.offset(1), ptr);

                    // Wipe both pad words.
                    ptr::write_bytes(pad, 0u8, 2);
                } else {
                    // One-word pad: an ordinary pointer to the content.
                    zero_object(arena, segment_id, pad);
                    ptr::write_bytes(pad, 0u8, 1);
                }
            }
        }
    }
588
    /// Zeroes the content located at `ptr` as described by `tag`, recursing
    /// into any pointers it contains. The tag may live apart from the
    /// content (e.g. a far-pointer landing pad or an inline-composite tag).
    pub unsafe fn zero_object_helper(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        tag: *mut WirePointer,
        ptr: *mut u8,
    ) {
        match (*tag).kind() {
            WirePointerKind::Other => {
                panic!("Don't know how to handle OTHER")
            }
            WirePointerKind::Struct => {
                // Recurse into each pointer of the pointer section, then
                // wipe the whole struct (data + pointers) in one sweep.
                let pointer_section: *mut WirePointer = ptr
                    .offset((*tag).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *mut _;

                let count = (*tag).struct_ptr_count() as isize;
                for i in 0..count {
                    zero_object(arena, segment_id, pointer_section.offset(i));
                }
                ptr::write_bytes(
                    ptr,
                    0u8,
                    (*tag).struct_word_size() as usize * BYTES_PER_WORD,
                );
            }
            WirePointerKind::List => match (*tag).list_element_size() {
                Void => {}
                // Primitive lists hold no pointers; just wipe the data.
                Bit | Byte | TwoBytes | FourBytes | EightBytes => ptr::write_bytes(
                    ptr,
                    0u8,
                    BYTES_PER_WORD
                        * round_bits_up_to_words(
                            u64::from((*tag).list_element_count())
                                * u64::from(data_bits_per_element((*tag).list_element_size())),
                        ) as usize,
                ),
                Pointer => {
                    // One pointer word per element: recurse, then wipe.
                    let count = (*tag).list_element_count() as usize;
                    for i in 0..count as isize {
                        zero_object(
                            arena,
                            segment_id,
                            ptr.offset(i * BYTES_PER_WORD as isize) as *mut _,
                        );
                    }
                    ptr::write_bytes(ptr, 0u8, count * BYTES_PER_WORD);
                }
                InlineComposite => {
                    // The first word of the content is a tag describing the
                    // per-element struct layout.
                    let element_tag: *mut WirePointer = ptr as *mut _;

                    assert!(
                        (*element_tag).kind() == WirePointerKind::Struct,
                        "Don't know how to handle non-STRUCT inline composite"
                    );

                    let data_size = (*element_tag).struct_data_size();
                    let pointer_count = (*element_tag).struct_ptr_count();
                    let mut pos = ptr.add(BYTES_PER_WORD);
                    let count = (*element_tag).inline_composite_list_element_count();
                    if pointer_count > 0 {
                        // For each element: skip its data section, then
                        // recurse into each of its pointers.
                        for _ in 0..count {
                            pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);
                            for _ in 0..pointer_count {
                                zero_object(arena, segment_id, pos as *mut WirePointer);
                                pos = pos.add(BYTES_PER_WORD);
                            }
                        }
                    }
                    // Wipe all elements plus the tag word (the `+ 1`).
                    ptr::write_bytes(
                        ptr,
                        0u8,
                        BYTES_PER_WORD * ((*element_tag).struct_word_size() * count + 1) as usize,
                    );
                }
            },
            WirePointerKind::Far => {
                panic!("Unexpected FAR pointer")
            }
        }
    }
669
    /// Zeroes the pointer word `reff` and, if it is a far pointer, its
    /// landing pad word(s) in the target segment. Does NOT touch the object
    /// the pointer ultimately refers to — use `zero_object` for that.
    #[inline]
    pub unsafe fn zero_pointer_and_fars(
        arena: &mut dyn BuilderArena,
        _segment_id: u32,
        reff: *mut WirePointer,
    ) -> Result<()> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
            let pad = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
            // A double-far pad occupies two words, a single-far pad one.
            let num_elements = if (*reff).is_double_far() { 2 } else { 1 };
            ptr::write_bytes(pad, 0, num_elements * BYTES_PER_WORD);
        }
        ptr::write_bytes(reff, 0, 1);
        Ok(())
    }
690
    /// Recursively computes the size of the object graph rooted at `reff`:
    /// total words plus a count of capabilities. Every visited object is
    /// bounds-checked against its segment, and recursion aborts with an
    /// error once `nesting_limit` is exhausted.
    pub unsafe fn total_size(
        arena: &dyn ReaderArena,
        segment_id: u32,
        reff: *const WirePointer,
        mut nesting_limit: i32,
    ) -> Result<MessageSize> {
        let mut result = MessageSize {
            word_count: 0,
            cap_count: 0,
        };

        if (*reff).is_null() {
            return Ok(result);
        };

        if nesting_limit <= 0 {
            return Err(Error::from_kind(ErrorKind::MessageIsTooDeeplyNested));
        }

        nesting_limit -= 1;

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        match (*reff).kind() {
            WirePointerKind::Struct => {
                bounds_check(
                    arena,
                    segment_id,
                    ptr,
                    (*reff).struct_word_size() as usize,
                    WirePointerKind::Struct,
                )?;
                result.word_count += u64::from((*reff).struct_word_size());

                // Recurse into each entry of the pointer section.
                let pointer_section: *const WirePointer = ptr
                    .offset((*reff).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *const _;
                let count: isize = (*reff).struct_ptr_count() as isize;
                for i in 0..count {
                    result +=
                        total_size(arena, segment_id, pointer_section.offset(i), nesting_limit)?;
                }
            }
            WirePointerKind::List => {
                match (*reff).list_element_size() {
                    Void => {}
                    Bit | Byte | TwoBytes | FourBytes | EightBytes => {
                        // Primitive list: pure data, nothing to recurse into.
                        let total_words = round_bits_up_to_words(
                            u64::from((*reff).list_element_count())
                                * u64::from(data_bits_per_element((*reff).list_element_size())),
                        );
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            total_words as usize,
                            WirePointerKind::List,
                        )?;
                        result.word_count += u64::from(total_words);
                    }
                    Pointer => {
                        let count = (*reff).list_element_count();
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            count as usize * WORDS_PER_POINTER,
                            WirePointerKind::List,
                        )?;

                        result.word_count += u64::from(count) * WORDS_PER_POINTER as u64;

                        for i in 0..count as isize {
                            result += total_size(
                                arena,
                                segment_id,
                                (ptr as *const WirePointer).offset(i),
                                nesting_limit,
                            )?;
                        }
                    }
                    InlineComposite => {
                        let word_count = (*reff).list_inline_composite_word_count();
                        // The content is preceded by one tag word.
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            word_count as usize + POINTER_SIZE_IN_WORDS,
                            WirePointerKind::List,
                        )?;

                        let element_tag: *const WirePointer = ptr as *const _;
                        let count = (*element_tag).inline_composite_list_element_count();

                        if (*element_tag).kind() != WirePointerKind::Struct {
                            return Err(Error::from_kind(
                                ErrorKind::CantHandleNonStructInlineComposite,
                            ));
                        }

                        // Reject a tag claiming more data than the list
                        // pointer's word count actually covers.
                        let actual_size =
                            u64::from((*element_tag).struct_word_size()) * u64::from(count);
                        if actual_size > u64::from(word_count) {
                            return Err(Error::from_kind(
                                ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
                            ));
                        }

                        result.word_count += actual_size + POINTER_SIZE_IN_WORDS as u64;

                        let data_size = (*element_tag).struct_data_size();
                        let pointer_count = (*element_tag).struct_ptr_count();

                        if pointer_count > 0 {
                            // Visit every pointer of every element: skip the
                            // element's data section, then walk its pointers.
                            let mut pos = ptr.add(BYTES_PER_WORD);
                            for _ in 0..count {
                                pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);

                                for _ in 0..pointer_count {
                                    result += total_size(
                                        arena,
                                        segment_id,
                                        pos as *const WirePointer,
                                        nesting_limit,
                                    )?;
                                    pos = pos.add(BYTES_PER_WORD);
                                }
                            }
                        }
                    }
                }
            }
            WirePointerKind::Far => {
                // follow_fars() already resolved far pointers; seeing one
                // here means the far chain is malformed.
                return Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer));
            }
            WirePointerKind::Other => {
                if (*reff).is_capability() {
                    result.cap_count += 1;
                } else {
                    return Err(Error::from_kind(ErrorKind::UnknownPointerType));
                }
            }
        }

        Ok(result)
    }
839
    /// Copies a struct's contents from `src` to `dst`: the data section is
    /// copied verbatim, and each pointer in the pointer section is
    /// deep-copied via `copy_message`. `data_size` (words) and
    /// `pointer_count` describe the source struct's layout.
    unsafe fn copy_struct(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut u8,
        src: *const u8,
        data_size: isize,
        pointer_count: isize,
    ) {
        copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);

        // The pointer sections begin immediately after the data sections.
        let src_refs: *const WirePointer = (src as *const WirePointer).offset(data_size);
        let dst_refs: *mut WirePointer = (dst as *mut WirePointer).offset(data_size);

        for ii in 0..pointer_count {
            copy_message(
                arena,
                segment_id,
                cap_table,
                dst_refs.offset(ii),
                src_refs.offset(ii),
            );
        }
    }
865
    /// Deep-copies the object at `src` into the message being built,
    /// allocating space and writing the new pointer at `dst`. Returns
    /// `(content_ptr, tag_ptr, segment_id)` for the copy. Panics on far or
    /// Other pointers — the source is expected to be a flat ("unchecked")
    /// message.
    pub unsafe fn copy_message(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut WirePointer,
        src: *const WirePointer,
    ) -> (*mut u8, *mut WirePointer, u32) {
        match (*src).kind() {
            WirePointerKind::Struct => {
                if (*src).is_null() {
                    ptr::write_bytes(dst, 0, 1);
                    (ptr::null_mut(), dst, segment_id)
                } else {
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).struct_word_size(),
                        WirePointerKind::Struct,
                    );
                    copy_struct(
                        arena,
                        segment_id,
                        cap_table,
                        dst_ptr,
                        src_ptr,
                        (*src).struct_data_size() as isize,
                        (*src).struct_ptr_count() as isize,
                    );
                    (*dst).set_struct_size_from_pieces(
                        (*src).struct_data_size(),
                        (*src).struct_ptr_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }
            }
            WirePointerKind::List => match (*src).list_element_size() {
                // Primitive lists: a single block copy of the data words.
                ElementSize::Void
                | ElementSize::Bit
                | ElementSize::Byte
                | ElementSize::TwoBytes
                | ElementSize::FourBytes
                | ElementSize::EightBytes => {
                    let word_count = round_bits_up_to_words(
                        u64::from((*src).list_element_count())
                            * u64::from(data_bits_per_element((*src).list_element_size())),
                    );
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) =
                        allocate(arena, dst, segment_id, word_count, WirePointerKind::List);
                    copy_nonoverlapping_check_zero(
                        src_ptr,
                        dst_ptr,
                        word_count as usize * BYTES_PER_WORD,
                    );
                    (*dst).set_list_size_and_count(
                        (*src).list_element_size(),
                        (*src).list_element_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }

                ElementSize::Pointer => {
                    let src_refs: *const WirePointer = WirePointer::target(src) as _;
                    let (dst_refs, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_element_count(),
                        WirePointerKind::List,
                    );
                    for ii in 0..((*src).list_element_count() as isize) {
                        copy_message(
                            arena,
                            segment_id,
                            cap_table,
                            // `dst_refs` is a byte pointer, so the offset is
                            // scaled to bytes here (src_refs advances by
                            // whole WirePointers).
                            dst_refs.offset(ii * BYTES_PER_WORD as isize) as *mut WirePointer,
                            src_refs.offset(ii),
                        );
                    }
                    (*dst)
                        .set_list_size_and_count(ElementSize::Pointer, (*src).list_element_count());
                    (dst_refs, dst, segment_id)
                }
                ElementSize::InlineComposite => {
                    let src_ptr = WirePointer::target(src);
                    // `+ 1` makes room for the tag word.
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_inline_composite_word_count() + 1,
                        WirePointerKind::List,
                    );

                    (*dst).set_list_inline_composite((*src).list_inline_composite_word_count());

                    // Copy the tag word verbatim, then each element struct.
                    let src_tag: *const WirePointer = src_ptr as _;
                    ptr::copy_nonoverlapping(src_tag, dst_ptr as *mut WirePointer, 1);

                    let mut src_element = src_ptr.add(BYTES_PER_WORD);
                    let mut dst_element = dst_ptr.add(BYTES_PER_WORD);

                    if (*src_tag).kind() != WirePointerKind::Struct {
                        panic!("unsupported INLINE_COMPOSITE list");
                    }
                    for _ in 0..(*src_tag).inline_composite_list_element_count() {
                        copy_struct(
                            arena,
                            segment_id,
                            cap_table,
                            dst_element,
                            src_element,
                            (*src_tag).struct_data_size() as isize,
                            (*src_tag).struct_ptr_count() as isize,
                        );
                        src_element = src_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                        dst_element = dst_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                    }
                    (dst_ptr, dst, segment_id)
                }
            },
            WirePointerKind::Other => {
                panic!("Unchecked message contained an OTHER pointer.")
            }
            WirePointerKind::Far => {
                panic!("Unchecked message contained a far pointer.")
            }
        }
    }
1002
    /// Moves the object referenced by `src` so that it is referenced by
    /// `dst` instead, without copying the object's body. `dst` must be null
    /// beforehand (asserted). Non-positional pointers (far/other) are
    /// position-independent and are moved by copying the pointer word.
    pub unsafe fn transfer_pointer(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src: *mut WirePointer,
    ) {
        assert!((*dst).is_null());
        if (*src).is_null() {
            ptr::write_bytes(dst, 0, 1);
        } else if (*src).is_positional() {
            // Struct/list pointer: the offset must be recomputed relative to
            // the new location (and may require a far pointer if the
            // segments differ).
            transfer_pointer_split(
                arena,
                dst_segment_id,
                dst,
                src_segment_id,
                src,
                WirePointer::mut_target(src),
            );
        } else {
            ptr::copy_nonoverlapping(src, dst, 1);
        }
    }
1038
    /// Helper for `transfer_pointer` handling positional pointers: writes a
    /// pointer at `dst` targeting the object at `src_ptr` (described by
    /// `src_tag`). If source and destination live in different segments, a
    /// far pointer with a landing pad is created.
    pub unsafe fn transfer_pointer_split(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src_tag: *mut WirePointer,
        src_ptr: *mut u8,
    ) {
        if dst_segment_id == src_segment_id {
            // Same segment: just recompute the relative offset.
            if (*src_tag).kind() == WirePointerKind::Struct && (*src_tag).struct_word_size() == 0 {
                // Zero-sized structs use the special self-referential form.
                (*dst).set_kind_and_target_for_empty_struct();
            } else {
                (*dst).set_kind_and_target((*src_tag).kind(), src_ptr);
            }
            // The size/type half of the pointer carries over unchanged.
            ptr::copy_nonoverlapping(&(*src_tag).upper32bits, &mut (*dst).upper32bits, 1);
        } else {
            // Different segments: we need a landing pad — ideally one word
            // in the segment that holds the object.
            match arena.allocate(src_segment_id, 1) {
                None => {
                    // No room in the source segment: allocate a two-word
                    // double-far landing pad in some other segment.
                    let (far_segment_id, word_idx) = arena.allocate_anywhere(2);
                    let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(word_idx as isize);

                    let (src_seg_start, _seg_len) = arena.get_segment_mut(src_segment_id);

                    // Pad word 0: far pointer to the object itself.
                    (*landing_pad).set_far(
                        false,
                        ((src_ptr as usize - src_seg_start as usize) / BYTES_PER_WORD) as u32,
                    );
                    (*landing_pad).set_far_segment_id(src_segment_id);

                    // Pad word 1: tag describing the object's size/type.
                    let landing_pad1 = landing_pad.offset(1);
                    (*landing_pad1).set_kind_with_zero_offset((*src_tag).kind());

                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad1).upper32bits,
                        1,
                    );

                    (*dst).set_far(true, word_idx);
                    (*dst).set_far_segment_id(far_segment_id);
                }
                Some(landing_pad_word) => {
                    // Single-far landing pad inside the source segment.
                    let (seg_start, seg_len) = arena.get_segment_mut(src_segment_id);
                    assert!(landing_pad_word < seg_len);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(landing_pad_word as isize);
                    (*landing_pad).set_kind_and_target((*src_tag).kind(), src_ptr);
                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad).upper32bits,
                        1,
                    );

                    (*dst).set_far(false, landing_pad_word);
                    (*dst).set_far_segment_id(src_segment_id);
                }
            }
        }
    }
1111
    /// Allocates a fresh struct of the given `size`, points `reff` at it,
    /// and returns a builder over the new space.
    #[inline]
    pub unsafe fn init_struct_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
    ) -> StructBuilder<'_> {
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            size.total(),
            WirePointerKind::Struct,
        );
        (*reff).set_struct_size(size);

        StructBuilder {
            arena,
            segment_id,
            cap_table,
            data: ptr as *mut _,
            // Pointer section starts right after the data section.
            pointers: ptr.offset((size.data as usize) as isize * BYTES_PER_WORD as isize) as *mut _,
            data_size: u32::from(size.data) * (BITS_PER_WORD as BitCount32),
            pointer_count: size.pointers,
        }
    }
1139
    /// Returns a builder for the struct `reff` points to, upgrading it when
    /// needed:
    /// - if `reff` is null, initializes it from `default` (or freshly, when
    ///   no default / only a null default is given);
    /// - errors if the pointer is not a struct pointer;
    /// - if the existing struct is smaller than `size` in either section,
    ///   reallocates a larger one, moving data and pointers over and zeroing
    ///   the abandoned space.
    #[inline]
    pub unsafe fn get_writable_struct_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<StructBuilder<'a>> {
        let mut ref_target = WirePointer::mut_target(reff);

        if (*reff).is_null() {
            match default {
                None => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                // A null default behaves the same as no default.
                Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                Some(d) => {
                    // Materialize the default value into the message, then
                    // continue as if it had been present all along.
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        cap_table,
                        reff,
                        d.as_ptr() as *const WirePointer,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    ref_target = new_ref_target;
                }
            }
        }

        let (old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, reff, ref_target, segment_id)?;
        if (*old_ref).kind() != WirePointerKind::Struct {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
            ));
        }

        let old_data_size = (*old_ref).struct_data_size();
        let old_pointer_count = (*old_ref).struct_ptr_count();
        let old_pointer_section: *mut WirePointer =
            old_ptr.offset(old_data_size as isize * BYTES_PER_WORD as isize) as *mut _;

        if old_data_size < size.data || old_pointer_count < size.pointers {
            // Existing struct is too small in at least one section;
            // allocate a replacement large enough for both old and new.
            let new_data_size = ::core::cmp::max(old_data_size, size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, size.pointers);
            let total_size =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;

            // Detach `reff` (and any far landing pads) from the old object
            // before reallocating through it.
            zero_pointer_and_fars(arena, segment_id, reff)?;

            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
            (*reff).set_struct_size_from_pieces(new_data_size, new_pointer_count);

            // Move the data section, then transfer each pointer so their
            // targets stay where they are.
            copy_nonoverlapping_check_zero(old_ptr, ptr, old_data_size as usize * BYTES_PER_WORD);

            let new_pointer_section: *mut WirePointer =
                ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize) as *mut _;
            for i in 0..old_pointer_count as isize {
                transfer_pointer(
                    arena,
                    segment_id,
                    new_pointer_section.offset(i),
                    old_segment_id,
                    old_pointer_section.offset(i),
                );
            }

            // Zero the abandoned old struct body.
            ptr::write_bytes(
                old_ptr,
                0,
                (old_data_size as usize + old_pointer_count as usize) * BYTES_PER_WORD,
            );

            Ok(StructBuilder {
                arena,
                segment_id,
                cap_table,
                data: ptr as *mut _,
                pointers: new_pointer_section,
                data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                pointer_count: new_pointer_count,
            })
        } else {
            Ok(StructBuilder {
                arena,
                segment_id: old_segment_id,
                cap_table,
                data: old_ptr,
                pointers: old_pointer_section,
                data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                pointer_count: old_pointer_count,
            })
        }
    }
1254
    /// Allocates a new list of primitive (or pointer) elements and points
    /// `reff` at it. Must not be used for struct lists — use
    /// `init_struct_list_pointer` for InlineComposite.
    #[inline]
    pub unsafe fn init_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: ElementSize,
    ) -> ListBuilder<'_> {
        assert!(
            element_size != InlineComposite,
            "Should have called initStructListPointer() instead"
        );

        // Bits per element: data bits plus (for Pointer lists) one pointer.
        let data_size = data_bits_per_element(element_size);
        let pointer_count = pointers_per_element(element_size);
        let step = data_size + pointer_count * BITS_PER_POINTER as u32;
        let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, word_count, WirePointerKind::List);

        (*reff).set_list_size_and_count(element_size, element_count);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr,
            step,
            element_count,
            element_size,
            struct_data_size: data_size,
            struct_pointer_count: pointer_count as u16,
        }
    }
1290
    /// Initializes `reff` as a pointer to a freshly allocated inline-composite
    /// (struct) list of `element_count` elements, each laid out per
    /// `element_size`.
    ///
    /// # Panics
    /// Panics if the total payload would reach 2**29 words, the wire format's
    /// limit for an inline-composite word count.
    #[inline]
    pub unsafe fn init_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: StructSize,
    ) -> ListBuilder<'_> {
        let words_per_element = element_size.total();

        // Compute the total payload in u64 so the 2**29 limit can be checked
        // before truncating to u32.
        let word_count_u64 = u64::from(element_count) * u64::from(words_per_element);
        assert!(
            word_count_u64 < (1 << 29),
            "Inline composite lists are limited to 2**29 words"
        );
        let word_count: WordCount32 = word_count_u64 as u32;
        // Allocate one extra word up front for the tag pointer that precedes
        // the elements of an inline-composite list.
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            POINTER_SIZE_IN_WORDS as u32 + word_count,
            WirePointerKind::List,
        );
        let ptr = ptr as *mut WirePointer;

        // The list pointer records the total word count (not element count)...
        (*reff).set_list_inline_composite(word_count);
        // ...while the tag word records the element count and the per-element
        // struct layout.
        (*ptr).set_kind_and_inline_composite_list_element_count(
            WirePointerKind::Struct,
            element_count,
        );
        (*ptr).set_struct_size(element_size);

        // Elements begin immediately after the tag word.
        let ptr1 = ptr.add(POINTER_SIZE_IN_WORDS);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr: ptr1 as *mut _,
            step: words_per_element * BITS_PER_WORD as u32,
            element_count,
            element_size: ElementSize::InlineComposite,
            struct_data_size: u32::from(element_size.data) * (BITS_PER_WORD as u32),
            struct_pointer_count: element_size.pointers,
        }
    }
1340
    /// Returns a `ListBuilder` over an existing non-struct list.
    ///
    /// If the pointer is null, the default value (if any) is first copied into
    /// the message; with no usable default an empty builder is returned.
    /// An existing inline-composite (struct) list can still be viewed through
    /// this function as long as each struct has the data/pointer sections the
    /// requested `element_size` needs.
    ///
    /// # Errors
    /// Fails if the existing pointer is not a list, or if the existing
    /// elements are incompatible with `element_size`.
    #[inline]
    pub unsafe fn get_writable_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: ElementSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        assert!(
            element_size != InlineComposite,
            "Use get_writable_struct_list_pointer() for struct lists"
        );

        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            // Copy the default value into the message and continue with the
            // newly written pointer.
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        // The pointer is non-null; follow any far pointers to the content.
        let (mut ptr, reff, segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*reff).list_element_size();

        if old_size == InlineComposite {
            // The existing data is a struct list. It may still be readable as
            // the requested primitive/pointer list if each struct's layout
            // covers the requested element.
            let tag: *const WirePointer = ptr as *const _;

            if (*tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            // Skip the tag word; elements follow it.
            ptr = ptr.add(BYTES_PER_WORD);

            let data_size = (*tag).struct_data_size();
            let pointer_count = (*tag).struct_ptr_count();

            match element_size {
                // Any struct can be treated as a Void element.
                Void => {}
                Bit => {
                    return Err(Error::from_kind(
                        ErrorKind::FoundStructListWhereBitListWasExpected,
                    ));
                }
                Byte | TwoBytes | FourBytes | EightBytes => {
                    // Data elements come from the first word of each struct's
                    // data section, which must therefore exist.
                    if data_size < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                }
                Pointer => {
                    if pointer_count < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                    // Pointer elements come from each struct's pointer
                    // section, so advance past the data section.
                    ptr = ptr.offset(data_size as isize * BYTES_PER_WORD as isize);
                }
                InlineComposite => {
                    // Excluded by the assert at the top of this function.
                    unreachable!()
                }
            }
            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                element_count: (*tag).inline_composite_list_element_count(),
                element_size: ElementSize::InlineComposite,
                step: (*tag).struct_word_size() * BITS_PER_WORD as u32,
                struct_data_size: u32::from(data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: pointer_count,
            })
        } else {
            // Simple existing list: it must be at least as large as what the
            // caller expects, in both data bits and pointers per element.
            let data_size = data_bits_per_element(old_size);
            let pointer_count = pointers_per_element(old_size);

            if data_size < data_bits_per_element(element_size)
                || pointer_count < pointers_per_element(element_size)
            {
                return Err(Error::from_kind(
                    ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                ));
            }

            let step = data_size + pointer_count * BITS_PER_POINTER as u32;

            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                step,
                element_count: (*reff).list_element_count(),
                element_size: old_size,
                struct_data_size: data_size,
                struct_pointer_count: pointer_count as u16,
            })
        }
    }
1475
    /// Returns a `ListBuilder` over an existing struct list, upgrading it in
    /// the message if its elements are smaller than `element_size`.
    ///
    /// When an upgrade is needed (e.g. the data was written with an older
    /// schema), a new, larger list is allocated, every element is copied over
    /// (data words copied, pointers transferred), and the old copy is zeroed.
    ///
    /// # Errors
    /// Fails if the existing pointer is not a list, is a bit list, or if the
    /// upgraded list would exceed the 2**29-word limit.
    #[inline]
    pub unsafe fn get_writable_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: StructSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            // Copy the default value into the message and continue with the
            // newly written pointer.
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        // Follow any far pointers to the existing list content.
        let (mut old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*old_ref).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*old_ref).list_element_size();

        if old_size == InlineComposite {
            // Already a struct list. Read the tag word (which precedes the
            // elements) to learn the existing element layout.
            let old_tag: *const WirePointer = old_ptr as *const _;
            old_ptr = old_ptr.add(BYTES_PER_WORD);
            if (*old_tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            let old_data_size = (*old_tag).struct_data_size();
            let old_pointer_count = (*old_tag).struct_ptr_count();
            let old_step =
                u32::from(old_data_size) + u32::from(old_pointer_count) * WORDS_PER_POINTER as u32;
            let element_count = (*old_tag).inline_composite_list_element_count();

            if old_data_size >= element_size.data && old_pointer_count >= element_size.pointers {
                // Existing elements are at least as large as requested:
                // reuse the list in place.
                return Ok(ListBuilder {
                    arena,
                    segment_id: old_segment_id,
                    cap_table,
                    ptr: old_ptr as *mut _,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: old_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: old_pointer_count,
                });
            }

            // Existing elements are too small (likely written by an older
            // schema version): allocate an upgraded list whose elements cover
            // both the old and the requested layout.
            let new_data_size = ::core::cmp::max(old_data_size, element_size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, element_size.pointers);
            let new_step =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;

            let total_size_u64 = u64::from(new_step) * u64::from(element_count);
            if total_size_u64 >= (1 << 29) {
                return Err(Error::from_kind(ErrorKind::MessageTooLarge(
                    total_size_u64 as usize,
                )));
            }
            let total_size = total_size_u64 as u32;

            // Detach the original pointer (and any far-pointer landing pads)
            // before allocating the replacement.
            zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

            // +1 word for the new tag.
            let (mut new_ptr, new_ref, new_segment_id) = allocate(
                arena,
                orig_ref,
                orig_segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*new_ref).set_list_inline_composite(total_size);

            let new_tag: *mut WirePointer = new_ptr as *mut _;
            (*new_tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                element_count,
            );
            (*new_tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
            new_ptr = new_ptr.add(BYTES_PER_WORD);

            // Migrate each element: copy the data words, then transfer the
            // pointers (fixing up far pointers as needed).
            let mut src = old_ptr as *mut WirePointer;
            let mut dst = new_ptr as *mut WirePointer;
            for _ in 0..element_count {
                copy_nonoverlapping_check_zero(src, dst, old_data_size as usize);

                let new_pointer_section = dst.offset(new_data_size as isize);
                let old_pointer_section = src.offset(old_data_size as isize);
                for jj in 0..(old_pointer_count as isize) {
                    transfer_pointer(
                        arena,
                        new_segment_id,
                        new_pointer_section.offset(jj),
                        old_segment_id,
                        old_pointer_section.offset(jj),
                    );
                }

                dst = dst.offset(new_step as isize);
                src = src.offset(old_step as isize);
            }

            // Zero out the old list, including its tag word (hence the
            // -1 word offset and +POINTER_SIZE_IN_WORDS length).
            ptr::write_bytes(
                old_ptr.offset(-(BYTES_PER_WORD as isize)),
                0,
                (u64::from(old_step) * u64::from(element_count)) as usize * BYTES_PER_WORD
                    + POINTER_SIZE_IN_WORDS,
            );

            Ok(ListBuilder {
                arena,
                segment_id: new_segment_id,
                cap_table,
                ptr: new_ptr,
                element_count,
                element_size: ElementSize::InlineComposite,
                step: new_step * BITS_PER_WORD as u32,
                struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: new_pointer_count,
            })
        } else {
            // Existing list is a primitive/pointer list; it must be upgraded
            // to an inline-composite struct list.
            let old_data_size = data_bits_per_element(old_size);
            let old_pointer_count = pointers_per_element(old_size);
            let old_step = old_data_size + old_pointer_count * BITS_PER_POINTER as u32;
            let element_count = (*old_ref).list_element_count();

            if old_size == ElementSize::Void {
                // A Void list carries no data: just build a fresh struct list
                // of the same length.
                Ok(init_struct_list_pointer(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    cap_table,
                    element_count,
                    element_size,
                ))
            } else {
                // Bit lists cannot be upgraded to struct lists.
                if old_size == ElementSize::Bit {
                    return Err(Error::from_kind(
                        ErrorKind::FoundBitListWhereStructListWasExpected,
                    ));
                }

                let mut new_data_size = element_size.data;
                let mut new_pointer_count = element_size.pointers;

                if old_size == ElementSize::Pointer {
                    // Old pointer elements land in the new pointer section.
                    new_pointer_count = ::core::cmp::max(new_pointer_count, 1);
                } else {
                    // Old data elements land in the first data word, so at
                    // least one data word is needed.
                    new_data_size = ::core::cmp::max(new_data_size, 1);
                }

                let new_step = u32::from(new_data_size)
                    + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;

                let total_words_u64 = u64::from(new_step) * u64::from(element_count);
                if total_words_u64 >= (1 << 29) {
                    return Err(Error::from_kind(ErrorKind::MessageTooLarge(
                        total_words_u64 as usize,
                    )));
                }
                let total_words = total_words_u64 as u32;

                // Detach the original pointer before allocating the new list.
                zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

                // +1 word for the tag.
                let (mut new_ptr, new_ref, new_segment_id) = allocate(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    total_words + POINTER_SIZE_IN_WORDS as u32,
                    WirePointerKind::List,
                );
                (*new_ref).set_list_inline_composite(total_words);

                let tag: *mut WirePointer = new_ptr as *mut _;
                (*tag).set_kind_and_inline_composite_list_element_count(
                    WirePointerKind::Struct,
                    element_count,
                );
                (*tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
                new_ptr = new_ptr.add(BYTES_PER_WORD);

                if old_size == ElementSize::Pointer {
                    // Move each old pointer into the corresponding element's
                    // pointer section.
                    let mut dst = new_ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize);
                    let mut src: *mut WirePointer = old_ptr as *mut _;
                    for _ in 0..element_count {
                        transfer_pointer(arena, new_segment_id, dst as *mut _, old_segment_id, src);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                        src = src.offset(1);
                    }
                } else {
                    // Copy each old data element into the start of the
                    // corresponding element's data section.
                    let mut dst = new_ptr;
                    let mut src: *mut u8 = old_ptr;
                    let old_byte_step = old_data_size / BITS_PER_BYTE as u32;
                    for _ in 0..element_count {
                        copy_nonoverlapping_check_zero(src, dst, old_byte_step as usize);
                        src = src.offset(old_byte_step as isize);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                    }
                }

                // Zero out the old list.
                ptr::write_bytes(
                    old_ptr,
                    0,
                    round_bits_up_to_bytes(u64::from(old_step) * u64::from(element_count)) as usize,
                );

                Ok(ListBuilder {
                    arena,
                    segment_id: new_segment_id,
                    cap_table,
                    ptr: new_ptr,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: new_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: new_pointer_count,
                })
            }
        }
    }
1732
1733 #[inline]
1734 pub unsafe fn init_text_pointer(
1735 arena: &mut dyn BuilderArena,
1736 reff: *mut WirePointer,
1737 segment_id: u32,
1738 size: ByteCount32,
1739 ) -> SegmentAnd<text::Builder<'_>> {
1740 assert!(size < (1 << 29), "text size too large");
1741
1742 let byte_size = size + 1;
1744
1745 let (ptr, reff, segment_id) = allocate(
1747 arena,
1748 reff,
1749 segment_id,
1750 round_bytes_up_to_words(byte_size),
1751 WirePointerKind::List,
1752 );
1753
1754 (*reff).set_list_size_and_count(Byte, byte_size);
1756
1757 SegmentAnd {
1758 segment_id,
1759 value: text::Builder::new(slice::from_raw_parts_mut(ptr, size as usize)),
1760 }
1761 }
1762
1763 #[inline]
1764 pub unsafe fn set_text_pointer<'a>(
1765 arena: &'a mut dyn BuilderArena,
1766 reff: *mut WirePointer,
1767 segment_id: u32,
1768 value: crate::text::Reader<'_>,
1769 ) -> SegmentAnd<text::Builder<'a>> {
1770 let value_bytes = value.as_bytes();
1771 let mut allocation = init_text_pointer(arena, reff, segment_id, value_bytes.len() as u32);
1773 allocation
1774 .value
1775 .reborrow()
1776 .as_bytes_mut()
1777 .copy_from_slice(value_bytes);
1778 allocation
1779 }
1780
    /// Returns a `text::Builder` over an existing text blob.
    ///
    /// If the pointer is null, the default (if any) is copied into the
    /// message first; with no default, an empty builder is returned.
    ///
    /// # Errors
    /// Fails if the existing pointer is not a byte list, or if the blob is
    /// empty or lacks a trailing NUL terminator.
    #[inline]
    pub unsafe fn get_writable_text_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<text::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(text::Builder::new(&mut [])),
                Some(d) => {
                    // Copy the default value into the message and continue
                    // with the newly written pointer.
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        // The wire representation includes a NUL terminator as the last byte;
        // require it to be present.
        let count = (*reff).list_element_count();
        if count == 0 || *ptr.offset((count - 1) as isize) != 0 {
            return Err(Error::from_kind(ErrorKind::TextBlobMissingNULTerminator));
        }

        // Expose only the bytes before the NUL, positioned at the end so
        // appends continue after the existing content.
        Ok(text::Builder::with_pos(
            slice::from_raw_parts_mut(ptr, (count - 1) as usize),
            (count - 1) as usize,
        ))
    }
1830
1831 #[inline]
1832 pub unsafe fn init_data_pointer(
1833 arena: &mut dyn BuilderArena,
1834 reff: *mut WirePointer,
1835 segment_id: u32,
1836 size: ByteCount32,
1837 ) -> SegmentAnd<data::Builder<'_>> {
1838 let (ptr, reff, segment_id) = allocate(
1840 arena,
1841 reff,
1842 segment_id,
1843 round_bytes_up_to_words(size),
1844 WirePointerKind::List,
1845 );
1846
1847 (*reff).set_list_size_and_count(Byte, size);
1849
1850 SegmentAnd {
1851 segment_id,
1852 value: data::builder_from_raw_parts(ptr, size),
1853 }
1854 }
1855
1856 #[inline]
1857 pub unsafe fn set_data_pointer<'a>(
1858 arena: &'a mut dyn BuilderArena,
1859 reff: *mut WirePointer,
1860 segment_id: u32,
1861 value: &[u8],
1862 ) -> SegmentAnd<data::Builder<'a>> {
1863 let allocation = init_data_pointer(
1864 arena,
1865 reff,
1866 segment_id,
1867 value.len().try_into().expect("data too large"),
1868 );
1869 allocation.value.copy_from_slice(value);
1870 allocation
1871 }
1872
    /// Returns a `data::Builder` over an existing data blob.
    ///
    /// If the pointer is null, the default (if any) is copied into the
    /// message first; with no default, an empty builder is returned.
    ///
    /// # Errors
    /// Fails if the existing pointer is not a byte list.
    #[inline]
    pub unsafe fn get_writable_data_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<data::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(&mut []),
                Some(d) => {
                    // Copy the default value into the message and continue
                    // with the newly written pointer.
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        Ok(data::builder_from_raw_parts(
            ptr,
            (*reff).list_element_count(),
        ))
    }
1916
    /// Deep-copies the struct read by `value` into a new allocation at `reff`.
    ///
    /// When `canonicalize` is true, trailing zero data words and trailing
    /// null pointers are trimmed so the output is in canonical form.
    ///
    /// # Errors
    /// Fails if `canonicalize` is requested but the reader's data section has
    /// a bit width other than 1 that is not byte-aligned, or if copying a
    /// pointee fails.
    pub unsafe fn set_struct_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: StructReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let mut data_size: ByteCount32 = round_bits_up_to_bytes(u64::from(value.data_size));
        let mut ptr_count = value.pointer_count;

        if canonicalize {
            // Only a lone bool (data_size == 1 bit) or whole bytes can be
            // canonicalized.
            if !(value.data_size == 1 || value.data_size % BITS_PER_BYTE as u32 == 0) {
                return Err(Error::from_kind(
                    ErrorKind::StructReaderHadBitwidthOtherThan1,
                ));
            }

            if value.data_size == 1 {
                // A false bool canonicalizes to an empty data section.
                if !value.get_bool_field(0) {
                    data_size = 0;
                }
            } else {
                // Trim trailing all-zero words from the data section. The
                // first (possibly partial) window handles a data section that
                // is not word-aligned.
                'chop: while data_size != 0 {
                    let end = data_size;
                    let mut window = data_size % BYTES_PER_WORD as u32;
                    if window == 0 {
                        window = BYTES_PER_WORD as u32;
                    }
                    let start = end - window;
                    let last_word = &value.get_data_section_as_blob()[start as usize..end as usize];
                    if last_word == [0; 8] {
                        data_size -= window;
                    } else {
                        break 'chop;
                    }
                }
            }

            // Trim trailing null pointers.
            while ptr_count != 0 && value.get_pointer_field(ptr_count as usize - 1).is_null() {
                ptr_count -= 1;
            }
        }

        let data_words = round_bytes_up_to_words(data_size);
        let total_size: WordCount32 = data_words + u32::from(ptr_count) * WORDS_PER_POINTER as u32;

        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
        (*reff).set_struct_size_from_pieces(data_words as u16, ptr_count);

        if value.data_size == 1 {
            // Single-bit struct: write the bool as one byte (unless trimmed
            // to zero above).
            if data_size != 0 {
                *ptr = u8::from(value.get_bool_field(0))
            }
        } else {
            copy_nonoverlapping_check_zero::<u8>(value.data, ptr, data_size as usize);
        }

        // Recursively copy each (kept) pointer field.
        let pointer_section: *mut WirePointer =
            ptr.offset(data_words as isize * BYTES_PER_WORD as isize) as *mut _;
        for i in 0..ptr_count as isize {
            deep_copy_pointee(
                arena,
                segment_id,
                cap_table,
                pointer_section.offset(i),
                value.arena,
                value.segment_id,
                value.cap_table,
                value.pointers.offset(i),
                value.nesting_limit,
                canonicalize,
            )?;
        }

        Ok(SegmentAnd {
            segment_id,
            value: ptr,
        })
    }
2000
2001 #[cfg(feature = "alloc")]
2002 pub unsafe fn set_capability_pointer(
2003 _arena: &mut dyn BuilderArena,
2004 _segment_id: u32,
2005 mut cap_table: CapTableBuilder,
2006 reff: *mut WirePointer,
2007 cap: alloc::boxed::Box<dyn ClientHook>,
2008 ) {
2009 (*reff).set_cap(cap_table.inject_cap(cap) as u32);
2011 }
2012
    /// Deep-copies the list read by `value` into a new allocation at `reff`.
    ///
    /// Non-struct lists are copied element-wise (pointers recursively, data
    /// bytes directly). Inline-composite lists copy each struct element; with
    /// `canonicalize`, the element layout is shrunk to the largest data/
    /// pointer sections actually used by any element.
    ///
    /// # Errors
    /// Fails if recursively copying any pointee fails.
    pub unsafe fn set_list_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: ListReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let total_size =
            round_bits_up_to_words(u64::from(value.element_count) * u64::from(value.step));

        if value.element_size != ElementSize::InlineComposite {
            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::List);

            if value.struct_pointer_count == 1 {
                // List of pointers: copy each pointee recursively.
                (*reff).set_list_size_and_count(Pointer, value.element_count);
                for i in 0..value.element_count as isize {
                    deep_copy_pointee(
                        arena,
                        segment_id,
                        cap_table,
                        (ptr as *mut WirePointer).offset(i),
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        (value.ptr as *const WirePointer).offset(i),
                        value.nesting_limit,
                        canonicalize,
                    )?;
                }
            } else {
                // List of data elements: recover the element size from the
                // per-element step in bits.
                let element_size = match value.step {
                    0 => Void,
                    1 => Bit,
                    8 => Byte,
                    16 => TwoBytes,
                    32 => FourBytes,
                    64 => EightBytes,
                    _ => {
                        panic!("invalid list step size: {}", value.step)
                    }
                };

                (*reff).set_list_size_and_count(element_size, value.element_count);

                // Copy all whole bytes of payload...
                let whole_byte_size =
                    u64::from(value.element_count) * u64::from(value.step) / BITS_PER_BYTE as u64;
                copy_nonoverlapping_check_zero(value.ptr, ptr, whole_byte_size as usize);

                // ...then copy any trailing partial byte (bit lists), masking
                // off bits past the end of the list.
                let leftover_bits =
                    u64::from(value.element_count) * u64::from(value.step) % BITS_PER_BYTE as u64;
                if leftover_bits > 0 {
                    let mask: u8 = (1 << leftover_bits as u8) - 1;

                    *ptr.offset(whole_byte_size as isize) =
                        mask & (*value.ptr.offset(whole_byte_size as isize))
                }
            }

            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        } else {
            // Inline-composite (struct) list.
            let decl_data_size = value.struct_data_size / BITS_PER_WORD as u32;
            let decl_pointer_count = value.struct_pointer_count;

            let mut data_size = 0;
            let mut ptr_count = 0;
            let mut total_size = total_size;

            if canonicalize {
                // Scan every element to find the largest data section and
                // pointer section actually in use; the canonical layout
                // trims trailing zero words / null pointers common to all.
                for ec in 0..value.element_count {
                    let se = value.get_struct_element(ec);
                    let mut local_data_size = decl_data_size;
                    'data_chop: while local_data_size != 0 {
                        let end = local_data_size * BYTES_PER_WORD as u32;
                        let window = BYTES_PER_WORD as u32;
                        let start = end - window;
                        let last_word =
                            &se.get_data_section_as_blob()[start as usize..end as usize];
                        if last_word != [0; 8] {
                            break 'data_chop;
                        } else {
                            local_data_size -= 1;
                        }
                    }
                    if local_data_size > data_size {
                        data_size = local_data_size;
                    }
                    let mut local_ptr_count = decl_pointer_count;
                    while local_ptr_count != 0
                        && se.get_pointer_field(local_ptr_count as usize - 1).is_null()
                    {
                        local_ptr_count -= 1;
                    }
                    if local_ptr_count > ptr_count {
                        ptr_count = local_ptr_count;
                    }
                }
                total_size = (data_size + u32::from(ptr_count)) * value.element_count;
            } else {
                data_size = decl_data_size;
                ptr_count = decl_pointer_count;
            }

            // +1 word for the tag that precedes the elements.
            let (ptr, reff, segment_id) = allocate(
                arena,
                reff,
                segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*reff).set_list_inline_composite(total_size);

            let tag: *mut WirePointer = ptr as *mut _;
            (*tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                value.element_count,
            );
            (*tag).set_struct_size_from_pieces(data_size as u16, ptr_count);
            let mut dst = ptr.add(BYTES_PER_WORD);

            // Copy each element: data words first, then pointers. Note that
            // `src` advances by the declared layout while `dst` advances by
            // the (possibly trimmed) output layout.
            let mut src: *const u8 = value.ptr;
            for _ in 0..value.element_count {
                copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);
                dst = dst.offset(data_size as isize * BYTES_PER_WORD as isize);
                src = src.offset(decl_data_size as isize * BYTES_PER_WORD as isize);

                for _ in 0..ptr_count {
                    deep_copy_pointee(
                        arena,
                        segment_id,
                        cap_table,
                        dst as *mut _,
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        src as *const WirePointer,
                        value.nesting_limit,
                        canonicalize,
                    )?;
                    dst = dst.add(BYTES_PER_WORD);
                    src = src.add(BYTES_PER_WORD);
                }

                // Skip any trailing source pointers that were trimmed.
                src =
                    src.offset((decl_pointer_count - ptr_count) as isize * BYTES_PER_WORD as isize);
            }
            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        }
    }
2178
    /// Recursively copies whatever `src` points at into a new allocation
    /// referenced by `dst`, validating the source as it goes.
    ///
    /// A null `src` simply zeroes `dst`. Structs and lists are bounds-checked
    /// against the source arena and copied via `set_struct_pointer` /
    /// `set_list_pointer`; capabilities are re-injected into the destination
    /// cap table (and rejected when `canonicalize` is set).
    ///
    /// # Errors
    /// Fails on nesting-limit exhaustion, out-of-bounds source data,
    /// malformed pointers, or invalid capability indices.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn deep_copy_pointee(
        dst_arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst_cap_table: CapTableBuilder,
        dst: *mut WirePointer,
        src_arena: &dyn ReaderArena,
        src_segment_id: u32,
        src_cap_table: CapTableReader,
        src: *const WirePointer,
        nesting_limit: i32,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        if (*src).is_null() {
            // Null copies to null.
            ptr::write_bytes(dst, 0, 1);
            return Ok(SegmentAnd {
                segment_id: dst_segment_id,
                value: ptr::null_mut(),
            });
        }

        let (mut ptr, src, src_segment_id) = follow_fars(src_arena, src, src_segment_id)?;

        match (*src).kind() {
            WirePointerKind::Struct => {
                if nesting_limit <= 0 {
                    return Err(Error::from_kind(
                        ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
                    ));
                }

                bounds_check(
                    src_arena,
                    src_segment_id,
                    ptr,
                    (*src).struct_word_size() as usize,
                    WirePointerKind::Struct,
                )?;

                // Copy via a reader over the validated source struct.
                set_struct_pointer(
                    dst_arena,
                    dst_segment_id,
                    dst_cap_table,
                    dst,
                    StructReader {
                        arena: src_arena,
                        segment_id: src_segment_id,
                        cap_table: src_cap_table,
                        data: ptr,
                        pointers: ptr
                            .offset((*src).struct_data_size() as isize * BYTES_PER_WORD as isize)
                            as *const _,
                        data_size: u32::from((*src).struct_data_size()) * BITS_PER_WORD as u32,
                        pointer_count: (*src).struct_ptr_count(),
                        nesting_limit: nesting_limit - 1,
                    },
                    canonicalize,
                )
            }
            WirePointerKind::List => {
                let element_size = (*src).list_element_size();
                if nesting_limit <= 0 {
                    return Err(Error::from_kind(
                        ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
                    ));
                }

                if element_size == InlineComposite {
                    let word_count = (*src).list_inline_composite_word_count();
                    let tag: *const WirePointer = ptr as *const _;
                    ptr = ptr.add(BYTES_PER_WORD);

                    // Check the payload plus the tag word (hence +1 from one
                    // word back).
                    bounds_check(
                        src_arena,
                        src_segment_id,
                        ptr.offset(-(BYTES_PER_WORD as isize)),
                        word_count as usize + 1,
                        WirePointerKind::List,
                    )?;

                    if (*tag).kind() != WirePointerKind::Struct {
                        return Err(Error::from_kind(
                            ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
                        ));
                    }

                    let element_count = (*tag).inline_composite_list_element_count();
                    let words_per_element = (*tag).struct_word_size();

                    // The claimed elements must fit in the claimed word count.
                    if u64::from(words_per_element) * u64::from(element_count)
                        > u64::from(word_count)
                    {
                        return Err(Error::from_kind(
                            ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
                        ));
                    }

                    if words_per_element == 0 {
                        // Zero-sized elements consume no data, so guard
                        // against amplification attacks by charging for the
                        // element count.
                        amplified_read(src_arena, u64::from(element_count))?;
                    }

                    set_list_pointer(
                        dst_arena,
                        dst_segment_id,
                        dst_cap_table,
                        dst,
                        ListReader {
                            arena: src_arena,
                            segment_id: src_segment_id,
                            cap_table: src_cap_table,
                            ptr: ptr as *const _,
                            element_count,
                            element_size,
                            step: words_per_element * BITS_PER_WORD as u32,
                            struct_data_size: u32::from((*tag).struct_data_size())
                                * BITS_PER_WORD as u32,
                            struct_pointer_count: (*tag).struct_ptr_count(),
                            nesting_limit: nesting_limit - 1,
                        },
                        canonicalize,
                    )
                } else {
                    let data_size = data_bits_per_element(element_size);
                    let pointer_count = pointers_per_element(element_size);
                    let step = data_size + pointer_count * BITS_PER_POINTER as u32;
                    let element_count = (*src).list_element_count();
                    let word_count =
                        round_bits_up_to_words(u64::from(element_count) * u64::from(step));

                    bounds_check(
                        src_arena,
                        src_segment_id,
                        ptr,
                        word_count as usize,
                        WirePointerKind::List,
                    )?;

                    if element_size == Void {
                        // Void lists occupy no space; charge for the element
                        // count to prevent amplification attacks.
                        amplified_read(src_arena, u64::from(element_count))?;
                    }

                    set_list_pointer(
                        dst_arena,
                        dst_segment_id,
                        dst_cap_table,
                        dst,
                        ListReader {
                            arena: src_arena,
                            segment_id: src_segment_id,
                            cap_table: src_cap_table,
                            ptr: ptr as *const _,
                            element_count,
                            element_size,
                            step,
                            struct_data_size: data_size,
                            struct_pointer_count: pointer_count as u16,
                            nesting_limit: nesting_limit - 1,
                        },
                        canonicalize,
                    )
                }
            }
            // A far pointer here means follow_fars landed on another far
            // pointer, which is malformed.
            WirePointerKind::Far => Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer)),
            WirePointerKind::Other => {
                if !(*src).is_capability() {
                    return Err(Error::from_kind(ErrorKind::UnknownPointerType));
                }
                // Canonical messages may not contain capabilities.
                if canonicalize {
                    return Err(Error::from_kind(
                        ErrorKind::CannotCreateACanonicalMessageWithACapability,
                    ));
                }
                #[cfg(feature = "alloc")]
                match src_cap_table.extract_cap((*src).cap_index() as usize) {
                    Some(cap) => {
                        set_capability_pointer(dst_arena, dst_segment_id, dst_cap_table, dst, cap);
                        Ok(SegmentAnd {
                            segment_id: dst_segment_id,
                            value: ptr::null_mut(),
                        })
                    }
                    None => Err(Error::from_kind(
                        ErrorKind::MessageContainsInvalidCapabilityPointer,
                    )),
                }
                // Without `alloc` there is no cap table to consult.
                #[cfg(not(feature = "alloc"))]
                return Err(Error::from_kind(ErrorKind::UnknownPointerType));
            }
        }
    }
2373
    /// Reads a struct pointer, returning a bounds-checked `StructReader`.
    ///
    /// A null pointer falls back to `default` (read out of the null arena);
    /// a null or absent default yields the empty default reader.
    ///
    /// # Errors
    /// Fails on nesting-limit exhaustion, a non-struct pointer, or
    /// out-of-bounds struct content.
    #[inline]
    pub unsafe fn read_struct_pointer<'a>(
        mut arena: &'a dyn ReaderArena,
        mut segment_id: u32,
        cap_table: CapTableReader,
        mut reff: *const WirePointer,
        default: Option<&'a [crate::Word]>,
        nesting_limit: i32,
    ) -> Result<StructReader<'a>> {
        if (*reff).is_null() {
            match default {
                None => return Ok(StructReader::new_default()),
                Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
                    return Ok(StructReader::new_default())
                }
                Some(d) => {
                    // Read the default value through the null arena.
                    reff = d.as_ptr() as *const _;
                    arena = &super::NULL_ARENA;
                    segment_id = 0;
                }
            }
        }

        if nesting_limit <= 0 {
            return Err(Error::from_kind(
                ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
            ));
        }

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        let data_size_words = (*reff).struct_data_size();

        if (*reff).kind() != WirePointerKind::Struct {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
            ));
        }

        // Validate that the whole struct lies within the segment.
        bounds_check(
            arena,
            segment_id,
            ptr,
            (*reff).struct_word_size() as usize,
            WirePointerKind::Struct,
        )?;

        Ok(StructReader {
            arena,
            segment_id,
            cap_table,
            data: ptr,
            // The pointer section immediately follows the data section.
            pointers: ptr.offset(data_size_words as isize * BYTES_PER_WORD as isize) as *const _,
            data_size: u32::from(data_size_words) * BITS_PER_WORD as BitCount32,
            pointer_count: (*reff).struct_ptr_count(),
            nesting_limit: nesting_limit - 1,
        })
    }
2432
    #[inline]
    #[cfg(feature = "alloc")]
    /// Resolves `reff` (expected to be a capability pointer) against
    /// `cap_table`, returning a fresh reference to the capability.
    ///
    /// Errors when the pointer is null, is not a capability pointer, or its
    /// index is not present in the table.
    pub unsafe fn read_capability_pointer(
        _arena: &dyn ReaderArena,
        _segment_id: u32,
        cap_table: CapTableReader,
        reff: *const WirePointer,
        _nesting_limit: i32,
    ) -> Result<alloc::boxed::Box<dyn ClientHook>> {
        if (*reff).is_null() {
            Err(Error::from_kind(
                ErrorKind::MessageContainsNullCapabilityPointer,
            ))
        } else if !(*reff).is_capability() {
            Err(Error::from_kind(
                ErrorKind::MessageContainsNonCapabilityPointerWhereCapabilityPointerWasExpected,
            ))
        } else {
            // The pointer stores an index into the message's capability table.
            let n = (*reff).cap_index() as usize;
            match cap_table.extract_cap(n) {
                Some(client_hook) => Ok(client_hook),
                None => Err(Error::from_kind(
                    ErrorKind::MessageContainsInvalidCapabilityPointer,
                )),
            }
        }
    }
2460
    #[inline]
    /// Follows `reff` (expected to be a list pointer) and produces a
    /// `ListReader` over its elements.
    ///
    /// A null `reff` falls back to `default_value` (raw pointer to default
    /// words), or to an empty default list when that is also null. When
    /// `expected_element_size` is given, the actual encoding is validated to be
    /// compatible with it (struct lists may stand in for primitive/pointer
    /// lists, per the protocol's list-upgrade rules).
    pub unsafe fn read_list_pointer(
        mut arena: &dyn ReaderArena,
        mut segment_id: u32,
        cap_table: CapTableReader,
        mut reff: *const WirePointer,
        default_value: *const u8,
        expected_element_size: Option<ElementSize>,
        nesting_limit: i32,
    ) -> Result<ListReader<'_>> {
        if (*reff).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListReader::new_default());
            }
            // Re-read from the default value's words, outside any message.
            reff = default_value as *const _;
            arena = &super::NULL_ARENA;
            segment_id = 0;
        }

        if nesting_limit <= 0 {
            return Err(Error::from_kind(ErrorKind::NestingLimitExceeded));
        }
        // Resolve any far pointer; `ptr` then addresses the list content.
        let (mut ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonListPointerWhereListPointerWasExpected,
            ));
        }

        let element_size = (*reff).list_element_size();
        match element_size {
            InlineComposite => {
                // For inline-composite lists the pointer stores a total word
                // count, and the first word of the content is a "tag" shaped
                // like a struct pointer describing each element.
                let word_count = (*reff).list_inline_composite_word_count();

                let tag: *const WirePointer = ptr as *const WirePointer;

                ptr = ptr.add(BYTES_PER_WORD);

                // Check the tag word plus the element words in one go.
                bounds_check(
                    arena,
                    segment_id,
                    ptr.offset(-(BYTES_PER_WORD as isize)),
                    word_count as usize + 1,
                    WirePointerKind::List,
                )?;

                if (*tag).kind() != WirePointerKind::Struct {
                    return Err(Error::from_kind(
                        ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
                    ));
                }

                let size = (*tag).inline_composite_list_element_count();
                let data_size = (*tag).struct_data_size();
                let ptr_count = (*tag).struct_ptr_count();
                let words_per_element = (*tag).struct_word_size();

                // The claimed element count must fit inside the claimed words.
                if u64::from(size) * u64::from(words_per_element) > u64::from(word_count) {
                    return Err(Error::from_kind(
                        ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
                    ));
                }

                if words_per_element == 0 {
                    // Zero-size elements occupy no space, so charge the read
                    // limit per element to defeat amplification attacks.
                    amplified_read(arena, u64::from(size))?;
                }

                // Validate the struct-list encoding against what the caller
                // expects to read out of it.
                match expected_element_size {
                    None | Some(Void | InlineComposite) => (),
                    Some(Bit) => {
                        return Err(Error::from_kind(
                            ErrorKind::FoundStructListWhereBitListWasExpected,
                        ));
                    }
                    Some(Byte | TwoBytes | FourBytes | EightBytes) => {
                        if data_size == 0 {
                            return Err(Error::from_kind(
                                ErrorKind::ExpectedAPrimitiveListButGotAListOfPointerOnlyStructs,
                            ));
                        }
                    }
                    Some(Pointer) => {
                        if ptr_count == 0 {
                            return Err(Error::from_kind(
                                ErrorKind::ExpectedAPointerListButGotAListOfDataOnlyStructs,
                            ));
                        }
                    }
                }

                Ok(ListReader {
                    arena,
                    segment_id,
                    cap_table,
                    ptr: ptr as *const _,
                    element_count: size,
                    element_size,
                    // Stride between elements, in bits.
                    step: words_per_element * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(data_size) * (BITS_PER_WORD as u32),
                    struct_pointer_count: ptr_count,
                    nesting_limit: nesting_limit - 1,
                })
            }
            _ => {
                // Non-composite lists: element geometry is implied by the
                // element size tag alone.
                let data_size = data_bits_per_element((*reff).list_element_size());
                let pointer_count = pointers_per_element((*reff).list_element_size());
                let element_count = (*reff).list_element_count();
                let step = data_size + pointer_count * BITS_PER_POINTER as u32;

                let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
                bounds_check(
                    arena,
                    segment_id,
                    ptr,
                    word_count as usize,
                    WirePointerKind::List,
                )?;

                if element_size == Void {
                    // Void elements occupy no space; charge the read limit per
                    // element to defeat amplification attacks.
                    amplified_read(arena, u64::from(element_count))?;
                }

                if let Some(expected_element_size) = expected_element_size {
                    // Bit lists cannot be reinterpreted as anything else.
                    if element_size == ElementSize::Bit && expected_element_size != ElementSize::Bit
                    {
                        return Err(Error::from_kind(
                            ErrorKind::FoundBitListWhereStructListWasExpected,
                        ));
                    }

                    let expected_data_bits_per_element =
                        data_bits_per_element(expected_element_size);
                    let expected_pointers_per_element = pointers_per_element(expected_element_size);

                    // The stored elements must be at least as large as expected.
                    if expected_data_bits_per_element > data_size
                        || expected_pointers_per_element > pointer_count
                    {
                        return Err(Error::from_kind(
                            ErrorKind::MessageContainsListWithIncompatibleElementType,
                        ));
                    }
                }

                Ok(ListReader {
                    arena,
                    segment_id,
                    cap_table,
                    ptr: ptr as *const _,
                    element_count,
                    element_size,
                    step,
                    struct_data_size: data_size,
                    struct_pointer_count: pointer_count as u16,
                    nesting_limit: nesting_limit - 1,
                })
            }
        }
    }
2638
    #[inline]
    /// Follows `reff` (expected to be a byte-list pointer) and returns its
    /// content as text, excluding the trailing NUL byte.
    ///
    /// A null `reff` falls back to `default` (or `""` when `None`). Errors when
    /// the pointer is not a byte list, is out of bounds, or the content is not
    /// NUL-terminated.
    pub unsafe fn read_text_pointer<'a>(
        mut arena: &'a dyn ReaderArena,
        mut segment_id: u32,
        mut reff: *const WirePointer,
        default: Option<&[crate::Word]>,
    ) -> Result<text::Reader<'a>> {
        if (*reff).is_null() {
            match default {
                None => return Ok("".into()),
                Some(d) => {
                    // Re-read from the default value's words, outside any message.
                    reff = d.as_ptr() as *const WirePointer;
                    arena = &super::NULL_ARENA;
                    segment_id = 0;
                }
            }
        }

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
        // Element count of a byte list == total byte length, including the NUL.
        let size = (*reff).list_element_count();

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonListPointerWhereTextWasExpected,
            ));
        }

        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsListPointerOfNonBytesWhereTextWasExpected,
            ));
        }

        bounds_check(
            arena,
            segment_id,
            ptr,
            round_bytes_up_to_words(size) as usize,
            WirePointerKind::List,
        )?;

        // Text must contain at least the NUL terminator.
        if size == 0 {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsTextThatIsNotNULTerminated,
            ));
        }

        let str_ptr = ptr;

        // The last byte must be the NUL terminator.
        if (*str_ptr.offset((size - 1) as isize)) != 0u8 {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsTextThatIsNotNULTerminated,
            ));
        }

        // Expose the bytes without the trailing NUL.
        Ok(text::Reader(slice::from_raw_parts(
            str_ptr,
            size as usize - 1,
        )))
    }
2699
    #[inline]
    /// Follows `reff` (expected to be a byte-list pointer) and returns its raw
    /// bytes as a `data::Reader`.
    ///
    /// A null `reff` falls back to `default` (or the empty slice when `None`).
    /// Errors when the pointer is not a byte list or is out of bounds.
    pub unsafe fn read_data_pointer<'a>(
        mut arena: &'a dyn ReaderArena,
        mut segment_id: u32,
        mut reff: *const WirePointer,
        default: Option<&'a [crate::Word]>,
    ) -> Result<data::Reader<'a>> {
        if (*reff).is_null() {
            match default {
                None => return Ok(&[]),
                Some(d) => {
                    // Re-read from the default value's words, outside any message.
                    reff = d.as_ptr() as *const WirePointer;
                    arena = &super::NULL_ARENA;
                    segment_id = 0;
                }
            }
        }

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        // Element count of a byte list == length in bytes.
        let size: u32 = (*reff).list_element_count();

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonListPointerWhereDataWasExpected,
            ));
        }

        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsListPointerOfNonBytesWhereDataWasExpected,
            ));
        }

        bounds_check(
            arena,
            segment_id,
            ptr,
            round_bytes_up_to_words(size) as usize,
            WirePointerKind::List,
        )?;

        Ok(data::reader_from_raw_parts(ptr as *const _, size))
    }
2744}
2745
2746static ZERO: u64 = 0;
2747fn zero_pointer() -> *const WirePointer {
2748 &ZERO as *const _ as *const _
2749}
2750
2751static NULL_ARENA: NullArena = NullArena;
2752
#[cfg(feature = "alloc")]
/// Table of capabilities attached to a message; capability pointers store
/// indices into this table. `None` entries are capabilities that were dropped.
pub type CapTable = alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>>;

#[cfg(not(feature = "alloc"))]
/// Placeholder capability table for no-alloc builds (capabilities unsupported).
pub struct CapTable;
2758
/// Read-only handle to a message's capability table.
#[derive(Copy, Clone)]
pub enum CapTableReader {
    // Raw pointer to the table; null means no table was imbued.
    Plain(*const CapTable),
}
2766
2767impl Default for CapTableReader {
2768 fn default() -> Self {
2769 CapTableReader::Plain(ptr::null())
2770 }
2771}
2772
2773#[cfg(feature = "alloc")]
2774impl CapTableReader {
2775 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2776 match *self {
2777 Self::Plain(hooks) => {
2778 if hooks.is_null() {
2779 return None;
2780 }
2781 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2782 unsafe { &*hooks };
2783 if index >= hooks.len() {
2784 None
2785 } else {
2786 hooks[index].as_ref().map(|hook| hook.add_ref())
2787 }
2788 }
2789 }
2790 }
2791}
2792
/// Mutable handle to a message's capability table.
#[derive(Copy, Clone)]
pub enum CapTableBuilder {
    // Raw pointer to the table; null means no table was imbued.
    Plain(*mut CapTable),
}
2800
2801impl Default for CapTableBuilder {
2802 fn default() -> Self {
2803 CapTableBuilder::Plain(ptr::null_mut())
2804 }
2805}
2806
2807impl CapTableBuilder {
2808 pub fn into_reader(self) -> CapTableReader {
2809 match self {
2810 Self::Plain(hooks) => CapTableReader::Plain(hooks),
2811 }
2812 }
2813
2814 #[cfg(feature = "alloc")]
2815 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2816 match *self {
2817 Self::Plain(hooks) => {
2818 if hooks.is_null() {
2819 return None;
2820 }
2821 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2822 unsafe { &*hooks };
2823 if index >= hooks.len() {
2824 None
2825 } else {
2826 hooks[index].as_ref().map(|hook| hook.add_ref())
2827 }
2828 }
2829 }
2830 }
2831
2832 #[cfg(feature = "alloc")]
2833 pub fn inject_cap(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) -> usize {
2834 match *self {
2835 Self::Plain(hooks) => {
2836 if hooks.is_null() {
2837 panic!(
2838 "Called inject_cap() on a null capability table. You need \
2839 to call imbue_mut() on this message before adding capabilities."
2840 );
2841 }
2842 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2843 unsafe { &mut *hooks };
2844 hooks.push(Some(cap));
2845 hooks.len() - 1
2846 }
2847 }
2848 }
2849
2850 #[cfg(feature = "alloc")]
2851 pub fn drop_cap(&mut self, index: usize) {
2852 match *self {
2853 Self::Plain(hooks) => {
2854 if hooks.is_null() {
2855 panic!(
2856 "Called drop_cap() on a null capability table. You need \
2857 to call imbue_mut() on this message before adding capabilities."
2858 );
2859 }
2860 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2861 unsafe { &mut *hooks };
2862 if index < hooks.len() {
2863 hooks[index] = None;
2864 }
2865 }
2866 }
2867 }
2868}
2869
/// A read-only reference to a single pointer field within a message.
#[derive(Clone, Copy)]
pub struct PointerReader<'a> {
    arena: &'a dyn ReaderArena,
    // Capability table used to resolve capability pointers.
    cap_table: CapTableReader,
    // Location of the wire pointer itself; null is treated as a null pointer.
    pointer: *const WirePointer,
    // Segment containing `pointer`.
    segment_id: u32,
    // Remaining recursion depth allowed when following this pointer.
    nesting_limit: i32,
}
2878
impl<'a> PointerReader<'a> {
    /// Returns a reader representing a null pointer; all reads through it
    /// produce defaults.
    pub fn new_default<'b>() -> PointerReader<'b> {
        PointerReader {
            arena: &NULL_ARENA,
            segment_id: 0,
            cap_table: Default::default(),
            pointer: ptr::null(),
            nesting_limit: 0x7fffffff,
        }
    }

    /// Creates a reader for the root pointer at `location`, first checking
    /// that one pointer's worth of words is in-bounds for `segment_id`.
    ///
    /// # Safety
    /// `location` must point into the segment identified by `segment_id`.
    pub unsafe fn get_root(
        arena: &'a dyn ReaderArena,
        segment_id: u32,
        location: *const u8,
        nesting_limit: i32,
    ) -> Result<Self> {
        wire_helpers::bounds_check(
            arena,
            segment_id,
            location as *const _,
            POINTER_SIZE_IN_WORDS,
            WirePointerKind::Struct,
        )?;

        Ok(PointerReader {
            arena,
            segment_id,
            cap_table: Default::default(),
            pointer: location as *const _,
            nesting_limit,
        })
    }

    /// Returns a copy of this reader with a shorter borrow.
    pub fn reborrow(&self) -> PointerReader<'_> {
        PointerReader {
            arena: self.arena,
            ..*self
        }
    }

    /// Creates a reader for `location` without bounds checking and with an
    /// effectively unlimited nesting limit.
    ///
    /// # Safety
    /// `location` must point to valid, readable message data.
    pub unsafe fn get_root_unchecked<'b>(location: *const u8) -> PointerReader<'b> {
        PointerReader {
            arena: &NULL_ARENA,
            segment_id: 0,
            cap_table: Default::default(),
            pointer: location as *const _,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Creates a reader for the root pointer (start of segment 0) of `arena`,
    /// using the arena's configured nesting limit.
    pub fn get_root_from_arena(arena: &'a dyn ReaderArena) -> Result<Self> {
        let (segment_start, _seg_len) = arena.get_segment(0)?;

        wire_helpers::bounds_check(
            arena,
            0,
            segment_start as *const _,
            POINTER_SIZE_IN_WORDS,
            WirePointerKind::Struct,
        )?;

        Ok(PointerReader {
            arena,
            segment_id: 0,
            cap_table: Default::default(),
            pointer: segment_start as *const _,
            nesting_limit: arena.nesting_limit(),
        })
    }

    /// Attaches a capability table for resolving capability pointers.
    pub fn imbue(&mut self, cap_table: CapTableReader) {
        self.cap_table = cap_table;
    }

    /// True when this reader has no pointer or the pointer word is all-zero.
    #[inline]
    pub fn is_null(&self) -> bool {
        self.pointer.is_null() || unsafe { (*self.pointer).is_null() }
    }

    /// Computes the total size of everything reachable through this pointer.
    pub fn total_size(&self) -> Result<MessageSize> {
        if self.pointer.is_null() {
            Ok(MessageSize {
                word_count: 0,
                cap_count: 0,
            })
        } else {
            unsafe {
                wire_helpers::total_size(
                    self.arena,
                    self.segment_id,
                    self.pointer,
                    self.nesting_limit,
                )
            }
        }
    }

    /// Reads the target as a struct, substituting `default` when the pointer
    /// is null.
    pub fn get_struct(self, default: Option<&'a [crate::Word]>) -> Result<StructReader<'a>> {
        // A missing pointer is decoded as a zero (null) pointer word.
        let reff: *const WirePointer = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe {
            wire_helpers::read_struct_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                reff,
                default,
                self.nesting_limit,
            )
        }
    }

    /// Reads the target as a list whose encoding must be compatible with
    /// `expected_element_size`, substituting `default` when the pointer is null.
    pub fn get_list(
        self,
        expected_element_size: ElementSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<ListReader<'a>> {
        let default_value: *const u8 = match default {
            None => core::ptr::null(),
            Some(d) => d.as_ptr() as *const u8,
        };
        let reff = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe {
            wire_helpers::read_list_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                reff,
                default_value,
                Some(expected_element_size),
                self.nesting_limit,
            )
        }
    }

    /// Reads the target as a list without checking its element encoding
    /// (used by canonicalization).
    fn get_list_any_size(self, default_value: *const u8) -> Result<ListReader<'a>> {
        let reff = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe {
            wire_helpers::read_list_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                reff,
                default_value,
                None,
                self.nesting_limit,
            )
        }
    }

    /// Reads the target as text, substituting `default` when the pointer is null.
    pub fn get_text(self, default: Option<&[crate::Word]>) -> Result<text::Reader<'a>> {
        let reff = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe { wire_helpers::read_text_pointer(self.arena, self.segment_id, reff, default) }
    }

    /// Reads the target as raw data, substituting `default` when the pointer
    /// is null.
    pub fn get_data(&self, default: Option<&'a [crate::Word]>) -> Result<data::Reader<'a>> {
        let reff = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe { wire_helpers::read_data_pointer(self.arena, self.segment_id, reff, default) }
    }

    /// Resolves the target as a capability from the imbued table.
    #[cfg(feature = "alloc")]
    pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
        let reff: *const WirePointer = if self.pointer.is_null() {
            zero_pointer()
        } else {
            self.pointer
        };
        unsafe {
            wire_helpers::read_capability_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                reff,
                self.nesting_limit,
            )
        }
    }

    /// Classifies the target of this pointer, following far pointers first.
    pub fn get_pointer_type(&self) -> Result<PointerType> {
        if self.is_null() {
            Ok(PointerType::Null)
        } else {
            let (_, reff, _) =
                unsafe { wire_helpers::follow_fars(self.arena, self.pointer, self.segment_id)? };

            match unsafe { (*reff).kind() } {
                // follow_fars() should have landed on a non-far pointer.
                WirePointerKind::Far => Err(Error::from_kind(ErrorKind::UnexpectedFarPointer)),
                WirePointerKind::Struct => Ok(PointerType::Struct),
                WirePointerKind::List => Ok(PointerType::List),
                WirePointerKind::Other => {
                    if unsafe { (*reff).is_capability() } {
                        Ok(PointerType::Capability)
                    } else {
                        Err(Error::from_kind(ErrorKind::UnknownPointerType))
                    }
                }
            }
        }
    }

    /// Checks whether the object this pointer targets is laid out in canonical
    /// form, advancing `read_head` past it. Capabilities and non-positional
    /// pointers are never canonical.
    pub fn is_canonical(&self, read_head: &Cell<*const u8>) -> Result<bool> {
        if self.pointer.is_null() || unsafe { !(*self.pointer).is_positional() } {
            return Ok(false);
        }

        match self.get_pointer_type()? {
            PointerType::Null => Ok(true),
            PointerType::Struct => {
                let mut data_trunc = false;
                let mut ptr_trunc = false;
                let st = self.get_struct(None)?;
                if st.get_data_section_size() == 0 && st.get_pointer_section_size() == 0 {
                    // An empty struct is canonical only if it points at itself.
                    Ok(self.pointer as *const _ == st.get_location())
                } else {
                    // Canonical structs also have truncated (non-zero-padded)
                    // data and pointer sections.
                    let result =
                        st.is_canonical(read_head, read_head, &mut data_trunc, &mut ptr_trunc)?;
                    Ok(result && data_trunc && ptr_trunc)
                }
            }
            PointerType::List => unsafe {
                self.get_list_any_size(ptr::null())?
                    .is_canonical(read_head, self.pointer)
            },
            PointerType::Capability => Ok(false),
        }
    }
}
3126
/// A mutable reference to a single pointer field within a message under
/// construction.
pub struct PointerBuilder<'a> {
    arena: &'a mut dyn BuilderArena,
    // Segment containing `pointer`.
    segment_id: u32,
    // Capability table receiving injected capabilities.
    cap_table: CapTableBuilder,
    // Location of the wire pointer itself.
    pointer: *mut WirePointer,
}
3133
impl<'a> PointerBuilder<'a> {
    /// Creates a builder for the root pointer at `location` in `segment_id`.
    #[inline]
    pub fn get_root(arena: &'a mut dyn BuilderArena, segment_id: u32, location: *mut u8) -> Self {
        PointerBuilder {
            arena,
            cap_table: Default::default(),
            segment_id,
            pointer: location as *mut _,
        }
    }

    /// Returns a builder borrowing from `self` for a shorter lifetime.
    #[inline]
    pub fn reborrow(&mut self) -> PointerBuilder<'_> {
        PointerBuilder {
            arena: self.arena,
            ..*self
        }
    }

    /// Attaches a capability table for storing injected capabilities.
    pub fn imbue(&mut self, cap_table: CapTableBuilder) {
        self.cap_table = cap_table;
    }

    /// True when the pointer word is all-zero.
    #[inline]
    pub fn is_null(&self) -> bool {
        unsafe { (*self.pointer).is_null() }
    }

    /// Gets the target as a writable struct of (at least) `size`, initializing
    /// it from `default` (or zero) when the pointer is null.
    pub fn get_struct(
        self,
        size: StructSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<StructBuilder<'a>> {
        unsafe {
            wire_helpers::get_writable_struct_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                size,
                default,
            )
        }
    }

    /// Gets the target as a writable list of `element_size` elements,
    /// initializing it from `default` when the pointer is null.
    pub fn get_list(
        self,
        element_size: ElementSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<ListBuilder<'a>> {
        let default_value: *const u8 = match default {
            None => core::ptr::null(),
            Some(d) => d.as_ptr() as *const u8,
        };
        unsafe {
            wire_helpers::get_writable_list_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                element_size,
                default_value,
            )
        }
    }

    /// Gets the target as a writable struct list whose elements have (at
    /// least) `element_size`, initializing from `default` when null.
    pub fn get_struct_list(
        self,
        element_size: StructSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<ListBuilder<'a>> {
        let default_value: *const u8 = match default {
            None => core::ptr::null(),
            Some(d) => d.as_ptr() as *const u8,
        };
        unsafe {
            wire_helpers::get_writable_struct_list_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                element_size,
                default_value,
            )
        }
    }

    /// Gets the target as writable text, initializing from `default` when null.
    pub fn get_text(self, default: Option<&'a [crate::Word]>) -> Result<text::Builder<'a>> {
        unsafe {
            wire_helpers::get_writable_text_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                default,
            )
        }
    }

    /// Gets the target as writable data, initializing from `default` when null.
    pub fn get_data(self, default: Option<&'a [crate::Word]>) -> Result<data::Builder<'a>> {
        unsafe {
            wire_helpers::get_writable_data_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                default,
            )
        }
    }

    /// Resolves the target as a capability from the imbued table.
    #[cfg(feature = "alloc")]
    pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
        unsafe {
            wire_helpers::read_capability_pointer(
                self.arena.as_reader(),
                self.segment_id,
                self.cap_table.into_reader(),
                self.pointer,
                i32::MAX,
            )
        }
    }

    /// Allocates a fresh struct of `size` and points this pointer at it,
    /// discarding any existing target.
    pub fn init_struct(self, size: StructSize) -> StructBuilder<'a> {
        unsafe {
            wire_helpers::init_struct_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                size,
            )
        }
    }

    /// Allocates a fresh list of `element_count` elements of `element_size`.
    pub fn init_list(
        self,
        element_size: ElementSize,
        element_count: ElementCount32,
    ) -> ListBuilder<'a> {
        unsafe {
            wire_helpers::init_list_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                element_count,
                element_size,
            )
        }
    }

    /// Allocates a fresh struct list of `element_count` elements of
    /// `element_size` each.
    pub fn init_struct_list(
        self,
        element_count: ElementCount32,
        element_size: StructSize,
    ) -> ListBuilder<'a> {
        unsafe {
            wire_helpers::init_struct_list_pointer(
                self.arena,
                self.pointer,
                self.segment_id,
                self.cap_table,
                element_count,
                element_size,
            )
        }
    }

    /// Allocates fresh text storage of `size` bytes.
    pub fn init_text(self, size: ByteCount32) -> text::Builder<'a> {
        unsafe {
            wire_helpers::init_text_pointer(self.arena, self.pointer, self.segment_id, size).value
        }
    }

    /// Allocates fresh data storage of `size` bytes.
    pub fn init_data(self, size: ByteCount32) -> data::Builder<'a> {
        unsafe {
            wire_helpers::init_data_pointer(self.arena, self.pointer, self.segment_id, size).value
        }
    }

    /// Deep-copies `value` into this pointer's target, optionally in canonical
    /// form.
    pub fn set_struct(&mut self, value: &StructReader, canonicalize: bool) -> Result<()> {
        unsafe {
            wire_helpers::set_struct_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                self.pointer,
                *value,
                canonicalize,
            )?;
            Ok(())
        }
    }

    /// Deep-copies `value` into this pointer's target, optionally in canonical
    /// form.
    pub fn set_list(&mut self, value: &ListReader, canonicalize: bool) -> Result<()> {
        unsafe {
            wire_helpers::set_list_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                self.pointer,
                *value,
                canonicalize,
            )?;
            Ok(())
        }
    }

    /// Copies `value` into this pointer's target as text.
    pub fn set_text(&mut self, value: crate::text::Reader<'_>) {
        unsafe {
            wire_helpers::set_text_pointer(self.arena, self.pointer, self.segment_id, value);
        }
    }

    /// Copies `value` into this pointer's target as raw data.
    pub fn set_data(&mut self, value: &[u8]) {
        unsafe {
            wire_helpers::set_data_pointer(self.arena, self.pointer, self.segment_id, value);
        }
    }

    /// Stores `cap` in the capability table and writes a capability pointer
    /// referencing it.
    #[cfg(feature = "alloc")]
    pub fn set_capability(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) {
        unsafe {
            wire_helpers::set_capability_pointer(
                self.arena,
                self.segment_id,
                self.cap_table,
                self.pointer,
                cap,
            );
        }
    }

    /// Replaces this pointer's target with a deep copy of `other`'s target;
    /// a null source clears this pointer instead.
    pub fn copy_from(&mut self, other: PointerReader, canonicalize: bool) -> Result<()> {
        if other.pointer.is_null() {
            if !self.pointer.is_null() {
                unsafe {
                    // Free the old target, then zero the pointer word itself.
                    wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
                    *self.pointer = mem::zeroed();
                }
            }
        } else {
            unsafe {
                wire_helpers::deep_copy_pointee(
                    self.arena,
                    self.segment_id,
                    self.cap_table,
                    self.pointer,
                    other.arena,
                    other.segment_id,
                    other.cap_table,
                    other.pointer,
                    other.nesting_limit,
                    canonicalize,
                )?;
            }
        }
        Ok(())
    }

    /// Zeroes this pointer's target and the pointer word itself.
    pub fn clear(&mut self) {
        unsafe {
            wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
            ptr::write_bytes(self.pointer, 0, 1);
        }
    }

    /// Returns a read-only view of this pointer (borrowing from `self`).
    pub fn as_reader(&self) -> PointerReader<'_> {
        PointerReader {
            arena: self.arena.as_reader(),
            segment_id: self.segment_id,
            cap_table: self.cap_table.into_reader(),
            pointer: self.pointer,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Consumes this builder, returning a read-only view of the pointer.
    pub fn into_reader(self) -> PointerReader<'a> {
        PointerReader {
            arena: self.arena.as_reader(),
            segment_id: self.segment_id,
            cap_table: self.cap_table.into_reader(),
            pointer: self.pointer,
            nesting_limit: 0x7fffffff,
        }
    }
}
3421
/// A read-only reference to a struct within a message: a data section of raw
/// bytes followed by a pointer section.
#[derive(Clone, Copy)]
pub struct StructReader<'a> {
    arena: &'a dyn ReaderArena,
    // Capability table used to resolve capability pointers inside this struct.
    cap_table: CapTableReader,
    // Start of the data section.
    data: *const u8,
    // Start of the pointer section.
    pointers: *const WirePointer,
    // Segment containing this struct.
    segment_id: u32,
    // Size of the data section, in bits.
    data_size: BitCount32,
    // Number of pointers in the pointer section.
    pointer_count: WirePointerCount16,
    // Remaining recursion depth for following this struct's pointers.
    nesting_limit: i32,
}
3433
impl<'a> StructReader<'a> {
    /// Returns an empty struct reader; every field reads as its default.
    pub fn new_default<'b>() -> StructReader<'b> {
        StructReader {
            arena: &NULL_ARENA,
            segment_id: 0,
            cap_table: Default::default(),
            data: ptr::null(),
            pointers: ptr::null(),
            data_size: 0,
            pointer_count: 0,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Attaches a capability table for resolving capability pointers.
    pub fn imbue(&mut self, cap_table: CapTableReader) {
        self.cap_table = cap_table
    }

    /// Size of the data section, in bits.
    pub fn get_data_section_size(&self) -> BitCount32 {
        self.data_size
    }

    /// Number of pointers in the pointer section.
    pub fn get_pointer_section_size(&self) -> WirePointerCount16 {
        self.pointer_count
    }

    /// Views this struct's pointer section as a pointer list.
    pub fn get_pointer_section_as_list(&self) -> ListReader<'a> {
        ListReader {
            arena: self.arena,
            segment_id: self.segment_id,
            cap_table: self.cap_table,
            ptr: self.pointers as *const _,
            element_count: u32::from(self.pointer_count),
            element_size: ElementSize::Pointer,
            // One word per pointer.
            step: BITS_PER_WORD as BitCount32,
            struct_data_size: 0,
            struct_pointer_count: 0,
            nesting_limit: self.nesting_limit,
        }
    }

    /// Returns the raw bytes of the data section.
    pub fn get_data_section_as_blob(&self) -> &'a [u8] {
        if self.data_size == 0 {
            // `self.data` may be null here; avoid forming a slice from it.
            &[]
        } else {
            unsafe {
                ::core::slice::from_raw_parts(self.data, self.data_size as usize / BITS_PER_BYTE)
            }
        }
    }

    /// Reads the primitive field at element index `offset` (in units of
    /// `T`-sized elements). Reads beyond the data section return zero, which
    /// is how fields added by a newer schema read from an older struct.
    #[inline]
    pub fn get_data_field<T: Primitive + zero::Zero>(&self, offset: ElementCount) -> T {
        if (offset + 1) * bits_per_element::<T>() <= self.data_size as usize {
            let dwv: *const <T as Primitive>::Raw = self.data as *const _;
            unsafe { <T as Primitive>::get(&*dwv.add(offset)) }
        } else {
            T::zero()
        }
    }

    /// Reads the bool field at bit offset `offset`; out-of-range reads return
    /// `false`.
    #[inline]
    pub fn get_bool_field(&self, offset: ElementCount) -> bool {
        let boffset: BitCount32 = offset as BitCount32;
        if boffset < self.data_size {
            unsafe {
                let b: *const u8 = self.data.add(boffset as usize / BITS_PER_BYTE);
                ((*b) & (1u8 << (boffset % BITS_PER_BYTE as u32) as usize)) != 0
            }
        } else {
            false
        }
    }

    /// Reads a primitive field and XORs in `mask` (how non-zero defaults are
    /// encoded on the wire).
    #[inline]
    pub fn get_data_field_mask<T: Primitive + zero::Zero + Mask>(
        &self,
        offset: ElementCount,
        mask: <T as Mask>::T,
    ) -> T {
        Mask::mask(self.get_data_field(offset), mask)
    }

    /// Reads a bool field and XORs in `mask` (non-zero default encoding).
    #[inline]
    pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
        self.get_bool_field(offset) ^ mask
    }

    /// Returns a reader for pointer field `ptr_index`; out-of-range indices
    /// yield a default (null) pointer reader.
    #[inline]
    pub fn get_pointer_field(&self, ptr_index: WirePointerCount) -> PointerReader<'a> {
        if ptr_index < self.pointer_count as WirePointerCount {
            PointerReader {
                arena: self.arena,
                segment_id: self.segment_id,
                cap_table: self.cap_table,
                pointer: unsafe { self.pointers.add(ptr_index) },
                nesting_limit: self.nesting_limit,
            }
        } else {
            PointerReader::new_default()
        }
    }

    /// True when pointer field `ptr_index` is absent or null.
    #[inline]
    pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
        self.get_pointer_field(ptr_index).is_null()
    }

    /// Computes the total size of this struct plus everything reachable
    /// through its pointers.
    pub fn total_size(&self) -> Result<MessageSize> {
        let mut result = MessageSize {
            word_count: u64::from(wire_helpers::round_bits_up_to_words(u64::from(
                self.data_size,
            ))) + u64::from(self.pointer_count) * WORDS_PER_POINTER as u64,
            cap_count: 0,
        };

        for i in 0..self.pointer_count as isize {
            unsafe {
                result += wire_helpers::total_size(
                    self.arena,
                    self.segment_id,
                    self.pointers.offset(i),
                    self.nesting_limit,
                )?;
            }
        }

        Ok(result)
    }

    /// Address of the data section; identifies this struct's location.
    fn get_location(&self) -> *const u8 {
        self.data
    }

    /// Checks whether this struct is laid out in canonical form, advancing
    /// `read_head` past its words. `data_trunc`/`ptr_trunc` report whether the
    /// data and pointer sections carry no trailing zero padding.
    pub fn is_canonical(
        &self,
        read_head: &Cell<*const u8>,
        ptr_head: &Cell<*const u8>,
        data_trunc: &mut bool,
        ptr_trunc: &mut bool,
    ) -> Result<bool> {
        // Canonical objects must be packed back-to-back at the read head.
        if self.get_location() != read_head.get() {
            return Ok(false);
        }

        // Canonical structs have whole-word data sections.
        if self.get_data_section_size() % BITS_PER_WORD as u32 != 0 {
            return Ok(false);
        }

        let data_size = self.get_data_section_size() / BITS_PER_WORD as u32;

        // Truncated means: the final data word / final pointer is non-zero.
        if data_size != 0 {
            *data_trunc = self.get_data_field::<u64>((data_size - 1) as usize) != 0;
        } else {
            *data_trunc = true;
        }

        if self.pointer_count != 0 {
            *ptr_trunc = !self
                .get_pointer_field(self.pointer_count as usize - 1)
                .is_null();
        } else {
            *ptr_trunc = true;
        }

        // Advance past this struct's data and pointer sections.
        read_head.set(unsafe {
            (read_head.get()).offset(
                (data_size as isize + self.pointer_count as isize) * (BYTES_PER_WORD as isize),
            )
        });

        // All pointees must also be canonical.
        for ptr_idx in 0..self.pointer_count {
            if !self
                .get_pointer_field(ptr_idx as usize)
                .is_canonical(ptr_head)?
            {
                return Ok(false);
            }
        }

        Ok(true)
    }
}
3626
/// A mutable reference to a struct within a message under construction.
pub struct StructBuilder<'a> {
    arena: &'a mut dyn BuilderArena,
    // Capability table receiving injected capabilities.
    cap_table: CapTableBuilder,
    // Start of the data section.
    data: *mut u8,
    // Start of the pointer section.
    pointers: *mut WirePointer,
    // Segment containing this struct.
    segment_id: u32,
    // Size of the data section, in bits.
    data_size: BitCount32,
    // Number of pointers in the pointer section.
    pointer_count: WirePointerCount16,
}
3636
3637impl<'a> StructBuilder<'a> {
    /// Returns a builder borrowing from `self` for a shorter lifetime.
    #[inline]
    pub fn reborrow(&mut self) -> StructBuilder<'_> {
        StructBuilder {
            arena: self.arena,
            ..*self
        }
    }

    /// Returns a read-only view of this struct (borrowing from `self`).
    pub fn as_reader(&self) -> StructReader<'_> {
        StructReader {
            arena: self.arena.as_reader(),
            cap_table: self.cap_table.into_reader(),
            data: self.data,
            pointers: self.pointers,
            pointer_count: self.pointer_count,
            segment_id: self.segment_id,
            data_size: self.data_size,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Consumes this builder, returning a read-only view of the struct.
    pub fn into_reader(self) -> StructReader<'a> {
        StructReader {
            arena: self.arena.as_reader(),
            cap_table: self.cap_table.into_reader(),
            data: self.data,
            pointers: self.pointers,
            pointer_count: self.pointer_count,
            segment_id: self.segment_id,
            data_size: self.data_size,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Attaches a capability table for storing injected capabilities.
    pub fn imbue(&mut self, cap_table: CapTableBuilder) {
        self.cap_table = cap_table
    }

    /// Writes primitive `value` at element index `offset` (in `T`-sized
    /// units). No bounds check: assumes `offset` lies within the data
    /// section — callers are expected to stay within the declared size.
    #[inline]
    pub fn set_data_field<T: Primitive>(&self, offset: ElementCount, value: T) {
        let ptr: *mut <T as Primitive>::Raw = self.data as *mut _;
        unsafe { <T as Primitive>::set(&mut *ptr.add(offset), value) }
    }

    /// Writes a primitive field XORed with `mask` (how non-zero defaults are
    /// encoded on the wire).
    #[inline]
    pub fn set_data_field_mask<T: Primitive + Mask>(
        &self,
        offset: ElementCount,
        value: T,
        mask: <T as Mask>::T,
    ) {
        self.set_data_field(offset, Mask::mask(value, mask));
    }

    /// Reads the primitive field at element index `offset`. No bounds check:
    /// assumes `offset` lies within the data section.
    #[inline]
    pub fn get_data_field<T: Primitive>(&self, offset: ElementCount) -> T {
        let ptr: *const <T as Primitive>::Raw = self.data as *const _;
        unsafe { <T as Primitive>::get(&*ptr.add(offset)) }
    }

    /// Reads a primitive field and XORs in `mask` (non-zero default encoding).
    #[inline]
    pub fn get_data_field_mask<T: Primitive + Mask>(
        &self,
        offset: ElementCount,
        mask: <T as Mask>::T,
    ) -> T {
        Mask::mask(self.get_data_field(offset), mask)
    }

    /// Writes bool `value` at bit offset `offset`. No bounds check: assumes
    /// the bit lies within the data section.
    #[inline]
    pub fn set_bool_field(&self, offset: ElementCount, value: bool) {
        let boffset: BitCount0 = offset;
        let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
        let bitnum = boffset % BITS_PER_BYTE;
        // Clear the target bit, then OR in the new value.
        unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
    }

    /// Writes a bool field XORed with `mask` (non-zero default encoding).
    #[inline]
    pub fn set_bool_field_mask(&self, offset: ElementCount, value: bool, mask: bool) {
        self.set_bool_field(offset, value ^ mask);
    }

    /// Reads the bool field at bit offset `offset`. No bounds check: assumes
    /// the bit lies within the data section.
    #[inline]
    pub fn get_bool_field(&self, offset: ElementCount) -> bool {
        let boffset: BitCount0 = offset;
        let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
        unsafe { ((*b) & (1 << (boffset % BITS_PER_BYTE))) != 0 }
    }

    /// Reads a bool field and XORs in `mask` (non-zero default encoding).
    #[inline]
    pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
        self.get_bool_field(offset) ^ mask
    }

    /// Returns a builder for pointer field `ptr_index`. No bounds check:
    /// assumes `ptr_index` is within the pointer section.
    #[inline]
    pub fn get_pointer_field(self, ptr_index: WirePointerCount) -> PointerBuilder<'a> {
        PointerBuilder {
            arena: self.arena,
            segment_id: self.segment_id,
            cap_table: self.cap_table,
            pointer: unsafe { self.pointers.add(ptr_index) },
        }
    }

    /// True when pointer field `ptr_index` is null. No bounds check: assumes
    /// `ptr_index` is within the pointer section.
    #[inline]
    pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
        unsafe { (*self.pointers.add(ptr_index)).is_null() }
    }
3748
3749 pub fn copy_content_from(&mut self, other: &StructReader) -> Result<()> {
3750 use core::cmp::min;
3751 let shared_data_size = min(self.data_size, other.data_size);
3753 let shared_pointer_count = min(self.pointer_count, other.pointer_count);
3754
3755 if (shared_data_size > 0 && other.data == self.data)
3756 || (shared_pointer_count > 0 && other.pointers == self.pointers)
3757 {
3758 if (shared_data_size == 0 || other.data == self.data)
3761 && (shared_pointer_count == 0 || other.pointers == self.pointers)
3762 {
3763 return Err(Error::from_kind(
3764 ErrorKind::OnlyOneOfTheSectionPointersIsPointingToOurself,
3765 ));
3766 }
3767
3768 return Ok(());
3770 }
3771
3772 unsafe {
3773 if self.data_size > shared_data_size {
3774 if self.data_size == 1 {
3777 self.set_bool_field(0, false);
3778 } else {
3779 let unshared = self
3780 .data
3781 .offset((shared_data_size / BITS_PER_BYTE as u32) as isize);
3782 ptr::write_bytes(
3783 unshared,
3784 0,
3785 ((self.data_size - shared_data_size) / BITS_PER_BYTE as u32) as usize,
3786 );
3787 }
3788 }
3789
3790 if shared_data_size == 1 {
3792 self.set_bool_field(0, other.get_bool_field(0));
3793 } else {
3794 wire_helpers::copy_nonoverlapping_check_zero(
3795 other.data,
3796 self.data,
3797 (shared_data_size / BITS_PER_BYTE as u32) as usize,
3798 );
3799 }
3800
3801 for i in 0..self.pointer_count as isize {
3803 wire_helpers::zero_object(
3804 self.arena,
3805 self.segment_id,
3806 self.pointers.offset(i) as *mut _,
3807 );
3808 }
3809 ptr::write_bytes(self.pointers, 0u8, self.pointer_count as usize);
3810
3811 for i in 0..shared_pointer_count as isize {
3812 wire_helpers::deep_copy_pointee(
3813 self.arena,
3814 self.segment_id,
3815 self.cap_table,
3816 self.pointers.offset(i),
3817 other.arena,
3818 other.segment_id,
3819 other.cap_table,
3820 other.pointers.offset(i),
3821 other.nesting_limit,
3822 false,
3823 )?;
3824 }
3825 }
3826
3827 Ok(())
3828 }
3829}
3830
/// A read-only view of a list stored in a message.
#[derive(Clone, Copy)]
pub struct ListReader<'a> {
    /// Arena holding the message's segments.
    arena: &'a dyn ReaderArena,
    /// Capability table used to resolve capability pointers.
    cap_table: CapTableReader,
    /// Pointer to the start of the first element's content.
    ptr: *const u8,
    /// Segment in which the list content lives.
    segment_id: u32,
    /// Number of elements in the list.
    element_count: ElementCount32,
    /// Distance, in bits, between the starts of consecutive elements.
    step: BitCount32,
    /// For struct elements: size of each element's data section, in bits.
    struct_data_size: BitCount32,
    /// Remaining recursion depth allowed when following pointers.
    nesting_limit: i32,
    /// For struct elements: number of pointers per element.
    struct_pointer_count: WirePointerCount16,
    /// Wire encoding of the elements.
    element_size: ElementSize,
}
3844
impl<'a> ListReader<'a> {
    /// Returns a reader for an empty/default list.
    pub fn new_default<'b>() -> ListReader<'b> {
        ListReader {
            arena: &NULL_ARENA,
            segment_id: 0,
            cap_table: Default::default(),
            ptr: ptr::null(),
            element_count: 0,
            element_size: ElementSize::Void,
            step: 0,
            struct_data_size: 0,
            struct_pointer_count: 0,
            nesting_limit: 0x7fffffff,
        }
    }

    /// Sets the capability table used to resolve capability pointers
    /// reached through this list.
    pub fn imbue(&mut self, cap_table: CapTableReader) {
        self.cap_table = cap_table
    }

    /// Returns the number of elements in the list.
    #[inline]
    pub fn len(&self) -> ElementCount32 {
        self.element_count
    }

    /// Returns true if the list contains no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the distance, in bits, between the starts of consecutive
    /// elements.
    pub(crate) fn get_step_size_in_bits(&self) -> u32 {
        self.step
    }

    /// Returns the wire encoding of the list's elements.
    pub(crate) fn get_element_size(&self) -> ElementSize {
        self.element_size
    }

    /// Returns the list content as a raw byte slice
    /// (`step * element_count` bits, rounded up to whole bytes).
    pub(crate) fn into_raw_bytes(self) -> &'a [u8] {
        if self.element_count == 0 {
            // Handle this case explicitly: `ptr` may be null for an empty
            // list, and a slice must never be formed from a null pointer.
            &[]
        } else {
            let num_bytes = wire_helpers::round_bits_up_to_bytes(
                u64::from(self.step) * u64::from(self.element_count),
            ) as usize;
            unsafe { ::core::slice::from_raw_parts(self.ptr, num_bytes) }
        }
    }

    /// Returns a reader for the struct element at `index`.
    ///
    /// The element's data section begins `index * step` bits into the
    /// list; its pointer section immediately follows the data section.
    /// Decrements the nesting limit by one for the nested reader.
    #[inline]
    pub fn get_struct_element(&self, index: ElementCount32) -> StructReader<'a> {
        let index_byte: ByteCount32 =
            ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;

        let struct_data: *const u8 = unsafe { self.ptr.offset(index_byte as isize) };

        let struct_pointers: *const WirePointer =
            unsafe { struct_data.add(self.struct_data_size as usize / BITS_PER_BYTE) as *const _ };

        StructReader {
            arena: self.arena,
            segment_id: self.segment_id,
            cap_table: self.cap_table,
            data: struct_data,
            pointers: struct_pointers,
            data_size: self.struct_data_size,
            pointer_count: self.struct_pointer_count,
            nesting_limit: self.nesting_limit - 1,
        }
    }

    /// Returns a reader for the pointer element at `index`.
    ///
    /// Skips past the element's data section (`struct_data_size`) so that
    /// for struct elements the result points into the pointer section;
    /// for plain pointer lists `struct_data_size` is zero.
    #[inline]
    pub fn get_pointer_element(self, index: ElementCount32) -> PointerReader<'a> {
        let offset = (self.struct_data_size as u64 / BITS_PER_BYTE as u64
            + u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64)
            as isize;
        PointerReader {
            arena: self.arena,
            segment_id: self.segment_id,
            cap_table: self.cap_table,
            pointer: unsafe { self.ptr.offset(offset) } as *const _,
            nesting_limit: self.nesting_limit,
        }
    }

    /// Checks whether this list is encoded in canonical form, advancing
    /// `read_head` past the list's content on success.
    ///
    /// # Safety
    ///
    /// `read_head` must point at the expected start of this list's
    /// encoding within the same message, and `reff` must be the wire
    /// pointer that referenced this list.
    pub unsafe fn is_canonical(
        &self,
        read_head: &Cell<*const u8>,
        reff: *const WirePointer,
    ) -> Result<bool> {
        match self.element_size {
            ElementSize::InlineComposite => {
                // Advance past the one-word tag that precedes
                // inline-composite element content.
                read_head.set(unsafe { read_head.get().add(BYTES_PER_WORD) });
                if !core::ptr::eq(self.ptr, read_head.get()) {
                    return Ok(false);
                }
                // Canonical struct data sections are whole words.
                if self.struct_data_size % BITS_PER_WORD as u32 != 0 {
                    return Ok(false);
                }
                let struct_size = (self.struct_data_size / BITS_PER_WORD as u32)
                    + u32::from(self.struct_pointer_count);
                let word_count = unsafe { (*reff).list_inline_composite_word_count() };
                // The element size times count must match the word count
                // declared in the referencing pointer.
                if struct_size * self.element_count != word_count {
                    return Ok(false);
                }
                if struct_size == 0 {
                    return Ok(true);
                }
                let list_end = unsafe {
                    read_head
                        .get()
                        .add((self.element_count * struct_size) as usize * BYTES_PER_WORD)
                };
                // Pointees of element pointers must be laid out after the
                // list itself; track them with a separate head.
                let pointer_head = Cell::new(list_end);
                let mut list_data_trunc = false;
                let mut list_ptr_trunc = false;
                for idx in 0..self.element_count {
                    let mut data_trunc = false;
                    let mut ptr_trunc = false;
                    if !self.get_struct_element(idx).is_canonical(
                        read_head,
                        &pointer_head,
                        &mut data_trunc,
                        &mut ptr_trunc,
                    )? {
                        return Ok(false);
                    }
                    list_data_trunc |= data_trunc;
                    list_ptr_trunc |= ptr_trunc;
                }
                assert_eq!(read_head.get(), list_end);
                read_head.set(pointer_head.get());
                // NOTE(review): the flags come from StructReader::is_canonical's
                // out-params; presumably some element must use the final data
                // word and final pointer slot, else a smaller struct size
                // should have been declared — confirm against that method.
                Ok(list_data_trunc && list_ptr_trunc)
            }
            ElementSize::Pointer => {
                if !core::ptr::eq(self.ptr, read_head.get()) {
                    return Ok(false);
                }
                // Advance past the pointer words themselves, then validate
                // each pointee in order.
                read_head.set(unsafe {
                    read_head
                        .get()
                        .offset(self.element_count as isize * BYTES_PER_WORD as isize)
                });
                for idx in 0..self.element_count {
                    if !self.get_pointer_element(idx).is_canonical(read_head)? {
                        return Ok(false);
                    }
                }
                Ok(true)
            }
            element_size => {
                // Primitive elements (Void/Bit/Byte/.../EightBytes).
                if !core::ptr::eq(self.ptr, read_head.get()) {
                    return Ok(false);
                }
                // Total content size in bits, rounded up to whole words.
                let bit_size =
                    u64::from(self.element_count) * u64::from(data_bits_per_element(element_size));
                let mut word_size = bit_size / BITS_PER_WORD as u64;
                if bit_size % BITS_PER_WORD as u64 != 0 {
                    word_size += 1
                }

                let byte_size = bit_size / BITS_PER_BYTE as u64;
                let mut byte_read_head: *const u8 = read_head.get();
                byte_read_head = unsafe { byte_read_head.offset(byte_size as isize) };
                let read_head_end = unsafe {
                    read_head
                        .get()
                        .offset(word_size as isize * BYTES_PER_WORD as isize)
                };

                // The unused high bits of a trailing partial byte must be zero.
                let leftover_bits = bit_size % BITS_PER_BYTE as u64;
                if leftover_bits > 0 {
                    let mask: u8 = !((1 << leftover_bits as u8) - 1);
                    let partial_byte = unsafe { *byte_read_head };

                    if partial_byte & mask != 0 {
                        return Ok(false);
                    }
                    byte_read_head = unsafe { byte_read_head.offset(1_isize) };
                }

                // All padding bytes up to the word boundary must be zero.
                while byte_read_head != read_head_end {
                    if unsafe { *byte_read_head } != 0 {
                        return Ok(false);
                    }
                    byte_read_head = unsafe { byte_read_head.offset(1_isize) };
                }

                read_head.set(read_head_end);
                Ok(true)
            }
        }
    }
}
4040
/// A mutable view of a list stored in a message under construction.
pub struct ListBuilder<'a> {
    /// Arena holding the message's segments (mutable for allocation).
    arena: &'a mut dyn BuilderArena,
    /// Capability table used to resolve capability pointers.
    cap_table: CapTableBuilder,
    /// Pointer to the start of the first element's content.
    ptr: *mut u8,
    /// Segment in which the list content lives.
    segment_id: u32,
    /// Number of elements in the list.
    element_count: ElementCount32,
    /// Distance, in bits, between the starts of consecutive elements.
    step: BitCount32,
    /// For struct elements: size of each element's data section, in bits.
    struct_data_size: BitCount32,
    /// For struct elements: number of pointers per element.
    struct_pointer_count: WirePointerCount16,
    /// Wire encoding of the elements.
    element_size: ElementSize,
}
4052
4053impl<'a> ListBuilder<'a> {
4054 #[inline]
4055 pub fn new_default(arena: &mut dyn BuilderArena) -> ListBuilder<'_> {
4056 ListBuilder {
4057 arena,
4058 segment_id: 0,
4059 cap_table: Default::default(),
4060 ptr: ptr::null_mut(),
4061 element_count: 0,
4062 element_size: ElementSize::Void,
4063 step: 0,
4064 struct_data_size: 0,
4065 struct_pointer_count: 0,
4066 }
4067 }
4068
4069 pub fn into_reader(self) -> ListReader<'a> {
4070 ListReader {
4071 arena: self.arena.as_reader(),
4072 segment_id: self.segment_id,
4073 cap_table: self.cap_table.into_reader(),
4074 ptr: self.ptr as *const _,
4075 element_count: self.element_count,
4076 element_size: self.element_size,
4077 step: self.step,
4078 struct_data_size: self.struct_data_size,
4079 struct_pointer_count: self.struct_pointer_count,
4080 nesting_limit: 0x7fffffff,
4081 }
4082 }
4083
4084 #[inline]
4085 pub fn reborrow(&mut self) -> ListBuilder<'_> {
4086 ListBuilder {
4087 arena: self.arena,
4088 ..*self
4089 }
4090 }
4091
4092 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
4093 self.cap_table = cap_table
4094 }
4095
4096 #[inline]
4097 pub fn len(&self) -> ElementCount32 {
4098 self.element_count
4099 }
4100
4101 pub fn is_empty(&self) -> bool {
4102 self.len() == 0
4103 }
4104
4105 #[inline]
4106 pub fn get_struct_element(self, index: ElementCount32) -> StructBuilder<'a> {
4107 let index_byte = ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;
4108 let struct_data = unsafe { self.ptr.offset(index_byte as isize) };
4109 let struct_pointers =
4110 unsafe { struct_data.add((self.struct_data_size as usize) / BITS_PER_BYTE) as *mut _ };
4111 StructBuilder {
4112 arena: self.arena,
4113 segment_id: self.segment_id,
4114 cap_table: self.cap_table,
4115 data: struct_data,
4116 pointers: struct_pointers,
4117 data_size: self.struct_data_size,
4118 pointer_count: self.struct_pointer_count,
4119 }
4120 }
4121
4122 pub(crate) fn get_element_size(&self) -> ElementSize {
4123 self.element_size
4124 }
4125
4126 #[inline]
4127 pub fn get_pointer_element(self, index: ElementCount32) -> PointerBuilder<'a> {
4128 let offset = (u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64) as u32;
4129 PointerBuilder {
4130 arena: self.arena,
4131 segment_id: self.segment_id,
4132 cap_table: self.cap_table,
4133 pointer: unsafe { self.ptr.offset(offset as isize) } as *mut _,
4134 }
4135 }
4136
4137 pub(crate) fn as_raw_bytes(&self) -> &'a mut [u8] {
4138 if self.element_count == 0 {
4139 &mut []
4142 } else {
4143 let num_bytes = wire_helpers::round_bits_up_to_bytes(
4144 u64::from(self.step) * u64::from(self.element_count),
4145 ) as usize;
4146 unsafe { ::core::slice::from_raw_parts_mut(self.ptr, num_bytes) }
4147 }
4148 }
4149}
4150
/// Operations for reading and writing a primitive value as a list element.
pub trait PrimitiveElement {
    /// Reads the element at `index` from a list reader.
    fn get(list_reader: &ListReader, index: ElementCount32) -> Self;

    /// Reads the element at `index` from a list builder.
    fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self;

    /// Writes `value` as the element at `index` in a list builder.
    fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self);

    /// Returns the wire encoding used for lists of this type.
    fn element_size() -> ElementSize;
}
4167
4168impl<T: Primitive> PrimitiveElement for T {
4169 #[inline]
4170 fn get(list_reader: &ListReader, index: ElementCount32) -> Self {
4171 let offset = (u64::from(index) * u64::from(list_reader.step) / BITS_PER_BYTE as u64) as u32;
4172 unsafe {
4173 let ptr: *const u8 = list_reader.ptr.offset(offset as isize);
4174 <Self as Primitive>::get(&*(ptr as *const <Self as Primitive>::Raw))
4175 }
4176 }
4177
4178 #[inline]
4179 fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self {
4180 let offset =
4181 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4182 unsafe {
4183 let ptr: *mut <Self as Primitive>::Raw =
4184 list_builder.ptr.offset(offset as isize) as *mut _;
4185 <Self as Primitive>::get(&*ptr)
4186 }
4187 }
4188
4189 #[inline]
4190 fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self) {
4191 let offset =
4192 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4193 unsafe {
4194 let ptr: *mut <Self as Primitive>::Raw =
4195 list_builder.ptr.offset(offset as isize) as *mut _;
4196 <Self as Primitive>::set(&mut *ptr, value);
4197 }
4198 }
4199
4200 fn element_size() -> ElementSize {
4201 match mem::size_of::<Self>() {
4202 0 => Void,
4203 1 => Byte,
4204 2 => TwoBytes,
4205 4 => FourBytes,
4206 8 => EightBytes,
4207 _ => unreachable!(),
4208 }
4209 }
4210}
4211
4212impl PrimitiveElement for bool {
4213 #[inline]
4214 fn get(list: &ListReader, index: ElementCount32) -> Self {
4215 let bindex = u64::from(index) * u64::from(list.step);
4216 unsafe {
4217 let b: *const u8 = list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize);
4218 ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0
4219 }
4220 }
4221 #[inline]
4222 fn get_from_builder(list: &ListBuilder, index: ElementCount32) -> Self {
4223 let bindex = u64::from(index) * u64::from(list.step);
4224 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4225 unsafe { ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0 }
4226 }
4227 #[inline]
4228 fn set(list: &ListBuilder, index: ElementCount32, value: Self) {
4229 let bindex = u64::from(index) * u64::from(list.step);
4230 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4231
4232 let bitnum = bindex % BITS_PER_BYTE as u64;
4233 unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
4234 }
4235 fn element_size() -> ElementSize {
4236 Bit
4237 }
4238}
4239
/// `()` maps to the zero-size `Void` encoding: elements carry no data,
/// so reads and writes are no-ops.
impl PrimitiveElement for () {
    #[inline]
    fn get(_list: &ListReader, _index: ElementCount32) {}

    #[inline]
    fn get_from_builder(_list: &ListBuilder, _index: ElementCount32) {}

    #[inline]
    fn set(_list: &ListBuilder, _index: ElementCount32, _value: ()) {}

    fn element_size() -> ElementSize {
        Void
    }
}