use core::cell::Cell;
use core::mem;
use core::ptr;

use crate::data;
use crate::private::arena::{BuilderArena, NullArena, ReaderArena, SegmentId};
#[cfg(feature = "alloc")]
use crate::private::capability::ClientHook;
use crate::private::mask::Mask;
use crate::private::primitive::{Primitive, WireValue};
use crate::private::units::*;
use crate::private::zero;
use crate::text;
use crate::{Error, ErrorKind, MessageSize, Result};

pub use self::ElementSize::{
    Bit, Byte, EightBytes, FourBytes, InlineComposite, Pointer, TwoBytes, Void,
};

#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ElementSize {
    Void = 0,
    Bit = 1,
    Byte = 2,
    TwoBytes = 3,
    FourBytes = 4,
    EightBytes = 5,
    Pointer = 6,
    InlineComposite = 7,
}

impl ElementSize {
    fn from(val: u8) -> Self {
        match val {
            0 => Self::Void,
            1 => Self::Bit,
            2 => Self::Byte,
            3 => Self::TwoBytes,
            4 => Self::FourBytes,
            5 => Self::EightBytes,
            6 => Self::Pointer,
            7 => Self::InlineComposite,
            _ => panic!("illegal element size: {val}"),
        }
    }
}
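
// A minimal round-trip sketch (hypothetical test, not part of the original
// file): `ElementSize::from` inverts the `u8` value of each variant,
// mirroring how the three-bit tag is unpacked from the wire.
#[test]
fn element_size_round_trip() {
    for val in 0..8u8 {
        assert_eq!(ElementSize::from(val) as u8, val);
    }
}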

pub fn data_bits_per_element(size: ElementSize) -> BitCount32 {
    match size {
        Void => 0,
        Bit => 1,
        Byte => 8,
        TwoBytes => 16,
        FourBytes => 32,
        EightBytes => 64,
        Pointer => 0,
        InlineComposite => 0,
    }
}

pub fn pointers_per_element(size: ElementSize) -> WirePointerCount32 {
    match size {
        Pointer => 1,
        _ => 0,
    }
}

#[derive(Clone, Copy, Debug)]
pub struct StructSize {
    pub data: WordCount16,
    pub pointers: WirePointerCount16,
}

impl StructSize {
    pub fn total(&self) -> WordCount32 {
        u32::from(self.data) + u32::from(self.pointers) * WORDS_PER_POINTER as WordCount32
    }
}

#[repr(u8)]
#[derive(Clone, Copy, PartialEq)]
pub enum WirePointerKind {
    Struct = 0,
    List = 1,
    Far = 2,
    Other = 3,
}

pub enum PointerType {
    Null,
    Struct,
    List,
    Capability,
}

impl WirePointerKind {
    fn from(val: u8) -> Self {
        match val {
            0 => Self::Struct,
            1 => Self::List,
            2 => Self::Far,
            3 => Self::Other,
            _ => panic!("illegal wire pointer kind: {val}"),
        }
    }
}

#[repr(C)]
pub struct WirePointer {
    offset_and_kind: WireValue<u32>,
    upper32bits: WireValue<u32>,
}

#[test]
#[cfg(feature = "unaligned")]
fn wire_pointer_align() {
    assert_eq!(core::mem::align_of::<WirePointer>(), 1);
}

impl WirePointer {
    #[inline]
    pub fn kind(&self) -> WirePointerKind {
        WirePointerKind::from(self.offset_and_kind.get() as u8 & 3)
    }

    #[inline]
    pub fn is_positional(&self) -> bool {
        (self.offset_and_kind.get() & 2) == 0 // matches Struct and List, but not Far or Other
    }

    #[inline]
    pub fn is_capability(&self) -> bool {
        self.offset_and_kind.get() == WirePointerKind::Other as u32
    }

    #[inline]
    pub unsafe fn target(ptr: *const Self) -> *const u8 {
        let this_addr: *const u8 = ptr as *const _;
        unsafe { this_addr.offset(8 * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize) }
    }

    #[inline]
    fn target_from_segment(
        ptr: *const Self,
        arena: &dyn ReaderArena,
        segment_id: u32,
    ) -> Result<*const u8> {
        let this_addr: *const u8 = ptr as *const _;
        unsafe {
            let offset = 1 + (((*ptr).offset_and_kind.get() as i32) >> 2);
            arena.check_offset(segment_id, this_addr, offset)
        }
    }

    #[inline]
    fn mut_target(ptr: *mut Self) -> *mut u8 {
        let this_addr: *mut u8 = ptr as *mut _;
        unsafe {
            this_addr.wrapping_offset(
                BYTES_PER_WORD as isize
                    * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize,
            )
        }
    }

    #[inline]
    pub fn set_kind_and_target(&mut self, kind: WirePointerKind, target: *mut u8) {
        let this_addr: isize = self as *const _ as isize;
        let target_addr: isize = target as *const _ as isize;
        self.offset_and_kind.set(
            ((((target_addr - this_addr) / BYTES_PER_WORD as isize) as i32 - 1) << 2) as u32
                | (kind as u32),
        )
    }

    #[inline]
    pub fn set_kind_with_zero_offset(&mut self, kind: WirePointerKind) {
        self.offset_and_kind.set(kind as u32)
    }

    #[inline]
    pub fn set_kind_and_target_for_empty_struct(&mut self) {
        // An empty struct needs no allocated space, so we use an offset of -1
        // (kind bits = Struct), making the pointer target itself. This keeps
        // it distinct from a null pointer, whose bits are all zero.
        self.offset_and_kind.set(0xfffffffc);
    }

    #[inline]
    pub fn inline_composite_list_element_count(&self) -> ElementCount32 {
        self.offset_and_kind.get() >> 2
    }

    #[inline]
    pub fn set_kind_and_inline_composite_list_element_count(
        &mut self,
        kind: WirePointerKind,
        element_count: ElementCount32,
    ) {
        self.offset_and_kind
            .set((element_count << 2) | (kind as u32))
    }

    #[inline]
    pub fn far_position_in_segment(&self) -> WordCount32 {
        self.offset_and_kind.get() >> 3
    }

    #[inline]
    pub fn is_double_far(&self) -> bool {
        ((self.offset_and_kind.get() >> 2) & 1) != 0
    }

    #[inline]
    pub fn set_far(&mut self, is_double_far: bool, pos: WordCount32) {
        self.offset_and_kind
            .set((pos << 3) | (u32::from(is_double_far) << 2) | WirePointerKind::Far as u32);
    }

    #[inline]
    pub fn set_cap(&mut self, index: u32) {
        self.offset_and_kind.set(WirePointerKind::Other as u32);
        self.upper32bits.set(index);
    }

    #[inline]
    pub fn struct_data_size(&self) -> WordCount16 {
        self.upper32bits.get() as WordCount16
    }

    #[inline]
    pub fn struct_ptr_count(&self) -> WordCount16 {
        (self.upper32bits.get() >> 16) as WordCount16
    }

    #[inline]
    pub fn struct_word_size(&self) -> WordCount32 {
        u32::from(self.struct_data_size())
            + u32::from(self.struct_ptr_count()) * WORDS_PER_POINTER as u32
    }

    #[inline]
    pub fn set_struct_size(&mut self, size: StructSize) {
        self.upper32bits
            .set(u32::from(size.data) | (u32::from(size.pointers) << 16))
    }

    #[inline]
    pub fn set_struct_size_from_pieces(&mut self, ds: WordCount16, rc: WirePointerCount16) {
        self.set_struct_size(StructSize {
            data: ds,
            pointers: rc,
        })
    }

    #[inline]
    pub fn list_element_size(&self) -> ElementSize {
        ElementSize::from(self.upper32bits.get() as u8 & 7)
    }

    #[inline]
    pub fn list_element_count(&self) -> ElementCount32 {
        self.upper32bits.get() >> 3
    }

    #[inline]
    pub fn list_inline_composite_word_count(&self) -> WordCount32 {
        self.list_element_count()
    }

    #[inline]
    pub fn set_list_size_and_count(&mut self, es: ElementSize, ec: ElementCount32) {
        assert!(ec < (1 << 29), "Lists are limited to 2**29 elements");
        self.upper32bits.set((ec << 3) | (es as u32));
    }

    #[inline]
    pub fn set_list_inline_composite(&mut self, wc: WordCount32) {
        assert!(
            wc < (1 << 29),
            "Inline composite lists are limited to 2**29 words"
        );
        self.upper32bits.set((wc << 3) | (InlineComposite as u32));
    }

    #[inline]
    pub fn far_segment_id(&self) -> SegmentId {
        self.upper32bits.get() as SegmentId
    }

    #[inline]
    pub fn set_far_segment_id(&mut self, si: SegmentId) {
        self.upper32bits.set(si)
    }

    #[inline]
    pub fn cap_index(&self) -> u32 {
        self.upper32bits.get()
    }

    #[inline]
    pub fn set_cap_index(&mut self, index: u32) {
        self.upper32bits.set(index)
    }

    #[inline]
    pub fn is_null(&self) -> bool {
        self.offset_and_kind.get() == 0 && self.upper32bits.get() == 0
    }
}
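
// Bit-packing sketch (hypothetical test, not in the original file): struct
// section sizes live in the upper 32 bits (data size in the low half-word,
// pointer count in the high half-word); a far pointer packs its position and
// double-far flag into the lower 32 bits. `mem::zeroed` is assumed to be a
// valid all-zero (null) WirePointer here.
#[test]
fn wire_pointer_round_trips() {
    let mut wp: WirePointer = unsafe { mem::zeroed() };
    assert!(wp.is_null());

    wp.set_struct_size_from_pieces(2, 3);
    assert_eq!(wp.struct_data_size(), 2);
    assert_eq!(wp.struct_ptr_count(), 3);
    assert_eq!(wp.struct_word_size(), 2 + 3 * WORDS_PER_POINTER as u32);

    wp.set_far(true, 7);
    assert!(wp.kind() == WirePointerKind::Far);
    assert!(wp.is_double_far());
    assert_eq!(wp.far_position_in_segment(), 7);
}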

mod wire_helpers {
    use core::{ptr, slice};

    use crate::data;
    use crate::private::arena::*;
    #[cfg(feature = "alloc")]
    use crate::private::capability::ClientHook;
    use crate::private::layout::ElementSize::*;
    use crate::private::layout::{data_bits_per_element, pointers_per_element};
    use crate::private::layout::{CapTableBuilder, CapTableReader};
    use crate::private::layout::{
        ElementSize, ListBuilder, ListReader, StructBuilder, StructReader, StructSize, WirePointer,
        WirePointerKind,
    };
    use crate::private::units::*;
    use crate::text;
    use crate::{Error, ErrorKind, MessageSize, Result};

    pub struct SegmentAnd<T> {
        #[allow(dead_code)]
        segment_id: u32,
        pub value: T,
    }

    #[inline]
    pub fn round_bytes_up_to_words(bytes: ByteCount32) -> WordCount32 {
        (bytes + 7) / BYTES_PER_WORD as u32
    }

    #[inline]
    pub fn round_bits_up_to_words(bits: BitCount64) -> WordCount32 {
        ((bits + 63) / (BITS_PER_WORD as u64)) as WordCount32
    }

    #[allow(dead_code)]
    #[inline]
    pub fn round_bits_up_to_bytes(bits: BitCount64) -> ByteCount32 {
        ((bits + 7) / (BITS_PER_BYTE as u64)) as ByteCount32
    }
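
    // Worked examples (hypothetical test, not in the original file, assuming
    // the usual 8-byte, 64-bit word constants): each helper rounds up to the
    // next whole unit.
    #[test]
    fn rounding_helpers_round_up() {
        assert_eq!(round_bytes_up_to_words(0), 0);
        assert_eq!(round_bytes_up_to_words(1), 1);
        assert_eq!(round_bytes_up_to_words(9), 2);
        assert_eq!(round_bits_up_to_words(64), 1);
        assert_eq!(round_bits_up_to_words(65), 2);
        assert_eq!(round_bits_up_to_bytes(9), 2);
    }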

    #[inline]
    pub fn bounds_check(
        arena: &dyn ReaderArena,
        segment_id: u32,
        start: *const u8,
        size_in_words: usize,
        _kind: WirePointerKind,
    ) -> Result<()> {
        arena.contains_interval(segment_id, start, size_in_words)
    }

    #[inline]
    pub fn amplified_read(arena: &dyn ReaderArena, virtual_amount: u64) -> Result<()> {
        arena.amplified_read(virtual_amount)
    }

    #[inline]
    pub unsafe fn copy_nonoverlapping_check_zero<T>(src: *const T, dst: *mut T, count: usize) {
        // `ptr::copy_nonoverlapping` requires non-null, aligned pointers even
        // when `count` is zero, so skip the call entirely for empty copies.
        if count > 0 {
            ptr::copy_nonoverlapping(src, dst, count);
        }
    }

    #[inline]
    pub unsafe fn allocate(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        amount: WordCount32,
        kind: WirePointerKind,
    ) -> (*mut u8, *mut WirePointer, u32) {
        let is_null = (*reff).is_null();
        if !is_null {
            zero_object(arena, segment_id, reff)
        }

        if amount == 0 && kind == WirePointerKind::Struct {
            (*reff).set_kind_and_target_for_empty_struct();
            return (reff as *mut _, reff, segment_id);
        }

        match arena.allocate(segment_id, amount) {
            None => {
                // There is not enough space in this segment. Allocate
                // elsewhere, with an extra word of space to act as the
                // landing pad for a far pointer.
                let amount_plus_ref = amount + POINTER_SIZE_IN_WORDS as u32;
                let (segment_id, word_idx) = arena.allocate_anywhere(amount_plus_ref);
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = seg_start.offset(word_idx as isize * BYTES_PER_WORD as isize);

                // Set up the original pointer to be a far pointer into the
                // new segment.
                (*reff).set_far(false, word_idx);
                (*reff).set_far_segment_id(segment_id);

                // Initialize the landing pad to indicate that the data
                // immediately follows the pad.
                let reff = ptr as *mut WirePointer;

                let ptr1 = ptr.add(BYTES_PER_WORD);
                (*reff).set_kind_and_target(kind, ptr1);
                (ptr1, reff, segment_id)
            }
            Some(idx) => {
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = (seg_start).offset(idx as isize * BYTES_PER_WORD as isize);
                (*reff).set_kind_and_target(kind, ptr);
                (ptr, reff, segment_id)
            }
        }
    }

    /// Follows a far pointer (if necessary), returning the pointed-to object,
    /// the resolved pointer (whose kind is something other than `Far`), and
    /// the id of the segment on which the pointed-to object lives.
    #[inline]
    pub unsafe fn follow_builder_fars(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        ref_target: *mut u8,
        segment_id: u32,
    ) -> Result<(*mut u8, *mut WirePointer, u32)> {
        if (*reff).kind() == WirePointerKind::Far {
            let segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
            let pad: *mut WirePointer =
                (seg_start as *mut WirePointer).offset((*reff).far_position_in_segment() as isize);
            if !(*reff).is_double_far() {
                Ok((WirePointer::mut_target(pad), pad, segment_id))
            } else {
                // The landing pad is another far pointer. It is followed by a
                // tag describing the pointed-to object.
                let reff = pad.offset(1);

                let segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment_mut(segment_id);
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, reff, segment_id))
            }
        } else {
            Ok((ref_target, reff, segment_id))
        }
    }

    /// Like `follow_builder_fars`, but for the reader side: follows a far
    /// pointer (if necessary), bounds-checking the landing pad along the way.
    #[inline]
    pub unsafe fn follow_fars(
        arena: &dyn ReaderArena,
        reff: *const WirePointer,
        segment_id: u32,
    ) -> Result<(*const u8, *const WirePointer, u32)> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();

            let (seg_start, _seg_len) = arena.get_segment(far_segment_id)?;
            let ptr = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);

            let pad_words: usize = if (*reff).is_double_far() { 2 } else { 1 };
            bounds_check(arena, far_segment_id, ptr, pad_words, WirePointerKind::Far)?;

            let pad: *const WirePointer = ptr as *const _;

            if !(*reff).is_double_far() {
                Ok((
                    WirePointer::target_from_segment(pad, arena, far_segment_id)?,
                    pad,
                    far_segment_id,
                ))
            } else {
                // The landing pad is another far pointer. It is followed by a
                // tag describing the pointed-to object.
                let tag = pad.offset(1);
                let double_far_segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment(double_far_segment_id)?;
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, tag, double_far_segment_id))
            }
        } else {
            Ok((
                WirePointer::target_from_segment(reff, arena, segment_id)?,
                reff,
                segment_id,
            ))
        }
    }
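
    // Far-pointer layout, informally (derived from the two functions above):
    //
    //   single far: [far ptr] -> [pad: ordinary pointer] -> object
    //   double far: [far ptr] -> [pad: far ptr][tag] ; the pad's far pointer
    //               gives the object's location in a third segment, and the
    //               tag describes the object's kind and size with a zero
    //               offset.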

    /// Zeroes out the pointed-to object. Used when the pointer is about to be
    /// overwritten, making the target object unreachable.
    pub unsafe fn zero_object(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        reff: *mut WirePointer,
    ) {
        match (*reff).kind() {
            WirePointerKind::Struct | WirePointerKind::List | WirePointerKind::Other => {
                zero_object_helper(arena, segment_id, reff, WirePointer::mut_target(reff))
            }
            WirePointerKind::Far => {
                let segment_id = (*reff).far_segment_id();
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let pad: *mut WirePointer = (seg_start as *mut WirePointer)
                    .offset((*reff).far_position_in_segment() as isize);

                if (*reff).is_double_far() {
                    let segment_id = (*pad).far_segment_id();

                    let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                    let ptr = seg_start.offset(
                        (*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize,
                    );
                    zero_object_helper(arena, segment_id, pad.offset(1), ptr);

                    ptr::write_bytes(pad, 0u8, 2);
                } else {
                    zero_object(arena, segment_id, pad);
                    ptr::write_bytes(pad, 0u8, 1);
                }
            }
        }
    }

    pub unsafe fn zero_object_helper(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        tag: *mut WirePointer,
        ptr: *mut u8,
    ) {
        match (*tag).kind() {
            WirePointerKind::Other => {
                panic!("Don't know how to handle OTHER")
            }
            WirePointerKind::Struct => {
                let pointer_section: *mut WirePointer = ptr
                    .offset((*tag).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *mut _;

                let count = (*tag).struct_ptr_count() as isize;
                for i in 0..count {
                    zero_object(arena, segment_id, pointer_section.offset(i));
                }
                ptr::write_bytes(
                    ptr,
                    0u8,
                    (*tag).struct_word_size() as usize * BYTES_PER_WORD,
                );
            }
            WirePointerKind::List => match (*tag).list_element_size() {
                Void => {}
                Bit | Byte | TwoBytes | FourBytes | EightBytes => ptr::write_bytes(
                    ptr,
                    0u8,
                    BYTES_PER_WORD
                        * round_bits_up_to_words(
                            u64::from((*tag).list_element_count())
                                * u64::from(data_bits_per_element((*tag).list_element_size())),
                        ) as usize,
                ),
                Pointer => {
                    let count = (*tag).list_element_count() as usize;
                    for i in 0..count as isize {
                        zero_object(
                            arena,
                            segment_id,
                            ptr.offset(i * BYTES_PER_WORD as isize) as *mut _,
                        );
                    }
                    ptr::write_bytes(ptr, 0u8, count * BYTES_PER_WORD);
                }
                InlineComposite => {
                    let element_tag: *mut WirePointer = ptr as *mut _;

                    assert!(
                        (*element_tag).kind() == WirePointerKind::Struct,
                        "Don't know how to handle non-STRUCT inline composite"
                    );

                    let data_size = (*element_tag).struct_data_size();
                    let pointer_count = (*element_tag).struct_ptr_count();
                    let mut pos = ptr.add(BYTES_PER_WORD);
                    let count = (*element_tag).inline_composite_list_element_count();
                    if pointer_count > 0 {
                        for _ in 0..count {
                            pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);
                            for _ in 0..pointer_count {
                                zero_object(arena, segment_id, pos as *mut WirePointer);
                                pos = pos.add(BYTES_PER_WORD);
                            }
                        }
                    }
                    ptr::write_bytes(
                        ptr,
                        0u8,
                        BYTES_PER_WORD * ((*element_tag).struct_word_size() * count + 1) as usize,
                    );
                }
            },
            WirePointerKind::Far => {
                panic!("Unexpected FAR pointer")
            }
        }
    }

    /// Zeroes out the pointer itself and, if it is a far pointer, the landing
    /// pad as well, but not the object body. Used when upgrading an object in
    /// place.
    #[inline]
    pub unsafe fn zero_pointer_and_fars(
        arena: &mut dyn BuilderArena,
        _segment_id: u32,
        reff: *mut WirePointer,
    ) -> Result<()> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
            let pad = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
            let num_elements = if (*reff).is_double_far() { 2 } else { 1 };
            ptr::write_bytes(pad, 0, num_elements * BYTES_PER_WORD);
        }
        ptr::write_bytes(reff, 0, 1);
        Ok(())
    }

    pub unsafe fn total_size(
        arena: &dyn ReaderArena,
        segment_id: u32,
        reff: *const WirePointer,
        mut nesting_limit: i32,
    ) -> Result<MessageSize> {
        let mut result = MessageSize {
            word_count: 0,
            cap_count: 0,
        };

        if (*reff).is_null() {
            return Ok(result);
        };

        if nesting_limit <= 0 {
            return Err(Error::from_kind(ErrorKind::MessageIsTooDeeplyNested));
        }

        nesting_limit -= 1;

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        match (*reff).kind() {
            WirePointerKind::Struct => {
                bounds_check(
                    arena,
                    segment_id,
                    ptr,
                    (*reff).struct_word_size() as usize,
                    WirePointerKind::Struct,
                )?;
                result.word_count += u64::from((*reff).struct_word_size());

                let pointer_section: *const WirePointer = ptr
                    .offset((*reff).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *const _;
                let count: isize = (*reff).struct_ptr_count() as isize;
                for i in 0..count {
                    result +=
                        total_size(arena, segment_id, pointer_section.offset(i), nesting_limit)?;
                }
            }
            WirePointerKind::List => {
                match (*reff).list_element_size() {
                    Void => {}
                    Bit | Byte | TwoBytes | FourBytes | EightBytes => {
                        let total_words = round_bits_up_to_words(
                            u64::from((*reff).list_element_count())
                                * u64::from(data_bits_per_element((*reff).list_element_size())),
                        );
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            total_words as usize,
                            WirePointerKind::List,
                        )?;
                        result.word_count += u64::from(total_words);
                    }
                    Pointer => {
                        let count = (*reff).list_element_count();
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            count as usize * WORDS_PER_POINTER,
                            WirePointerKind::List,
                        )?;

                        result.word_count += u64::from(count) * WORDS_PER_POINTER as u64;

                        for i in 0..count as isize {
                            result += total_size(
                                arena,
                                segment_id,
                                (ptr as *const WirePointer).offset(i),
                                nesting_limit,
                            )?;
                        }
                    }
                    InlineComposite => {
                        let word_count = (*reff).list_inline_composite_word_count();
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            word_count as usize + POINTER_SIZE_IN_WORDS,
                            WirePointerKind::List,
                        )?;

                        let element_tag: *const WirePointer = ptr as *const _;
                        let count = (*element_tag).inline_composite_list_element_count();

                        if (*element_tag).kind() != WirePointerKind::Struct {
                            return Err(Error::from_kind(
                                ErrorKind::CantHandleNonStructInlineComposite,
                            ));
                        }

                        let actual_size =
                            u64::from((*element_tag).struct_word_size()) * u64::from(count);
                        if actual_size > u64::from(word_count) {
                            return Err(Error::from_kind(
                                ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
                            ));
                        }

                        // Count the actual size rather than the claimed word
                        // count, because that is what we will end up with if
                        // we make a copy.
                        result.word_count += actual_size + POINTER_SIZE_IN_WORDS as u64;

                        let data_size = (*element_tag).struct_data_size();
                        let pointer_count = (*element_tag).struct_ptr_count();

                        if pointer_count > 0 {
                            let mut pos = ptr.add(BYTES_PER_WORD);
                            for _ in 0..count {
                                pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);

                                for _ in 0..pointer_count {
                                    result += total_size(
                                        arena,
                                        segment_id,
                                        pos as *const WirePointer,
                                        nesting_limit,
                                    )?;
                                    pos = pos.add(BYTES_PER_WORD);
                                }
                            }
                        }
                    }
                }
            }
            WirePointerKind::Far => {
                return Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer));
            }
            WirePointerKind::Other => {
                if (*reff).is_capability() {
                    result.cap_count += 1;
                } else {
                    return Err(Error::from_kind(ErrorKind::UnknownPointerType));
                }
            }
        }

        Ok(result)
    }

    unsafe fn copy_struct(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut u8,
        src: *const u8,
        data_size: isize,
        pointer_count: isize,
    ) {
        copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);

        let src_refs: *const WirePointer = (src as *const WirePointer).offset(data_size);
        let dst_refs: *mut WirePointer = (dst as *mut WirePointer).offset(data_size);

        for ii in 0..pointer_count {
            copy_message(
                arena,
                segment_id,
                cap_table,
                dst_refs.offset(ii),
                src_refs.offset(ii),
            );
        }
    }

    /// Copies a pointer (and the object it points to) from a trusted message,
    /// such as a schema default value. Returns the new target, the resolved
    /// destination pointer, and the destination segment id.
    pub unsafe fn copy_message(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut WirePointer,
        src: *const WirePointer,
    ) -> (*mut u8, *mut WirePointer, u32) {
        match (*src).kind() {
            WirePointerKind::Struct => {
                if (*src).is_null() {
                    ptr::write_bytes(dst, 0, 1);
                    (ptr::null_mut(), dst, segment_id)
                } else {
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).struct_word_size(),
                        WirePointerKind::Struct,
                    );
                    copy_struct(
                        arena,
                        segment_id,
                        cap_table,
                        dst_ptr,
                        src_ptr,
                        (*src).struct_data_size() as isize,
                        (*src).struct_ptr_count() as isize,
                    );
                    (*dst).set_struct_size_from_pieces(
                        (*src).struct_data_size(),
                        (*src).struct_ptr_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }
            }
            WirePointerKind::List => match (*src).list_element_size() {
                ElementSize::Void
                | ElementSize::Bit
                | ElementSize::Byte
                | ElementSize::TwoBytes
                | ElementSize::FourBytes
                | ElementSize::EightBytes => {
                    let word_count = round_bits_up_to_words(
                        u64::from((*src).list_element_count())
                            * u64::from(data_bits_per_element((*src).list_element_size())),
                    );
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) =
                        allocate(arena, dst, segment_id, word_count, WirePointerKind::List);
                    copy_nonoverlapping_check_zero(
                        src_ptr,
                        dst_ptr,
                        word_count as usize * BYTES_PER_WORD,
                    );
                    (*dst).set_list_size_and_count(
                        (*src).list_element_size(),
                        (*src).list_element_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }

                ElementSize::Pointer => {
                    let src_refs: *const WirePointer = WirePointer::target(src) as _;
                    let (dst_refs, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_element_count(),
                        WirePointerKind::List,
                    );
                    for ii in 0..((*src).list_element_count() as isize) {
                        copy_message(
                            arena,
                            segment_id,
                            cap_table,
                            dst_refs.offset(ii * BYTES_PER_WORD as isize) as *mut WirePointer,
                            src_refs.offset(ii),
                        );
                    }
                    (*dst)
                        .set_list_size_and_count(ElementSize::Pointer, (*src).list_element_count());
                    (dst_refs, dst, segment_id)
                }
                ElementSize::InlineComposite => {
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_inline_composite_word_count() + 1,
                        WirePointerKind::List,
                    );

                    (*dst).set_list_inline_composite((*src).list_inline_composite_word_count());

                    let src_tag: *const WirePointer = src_ptr as _;
                    ptr::copy_nonoverlapping(src_tag, dst_ptr as *mut WirePointer, 1);

                    let mut src_element = src_ptr.add(BYTES_PER_WORD);
                    let mut dst_element = dst_ptr.add(BYTES_PER_WORD);

                    if (*src_tag).kind() != WirePointerKind::Struct {
                        panic!("unsupported INLINE_COMPOSITE list");
                    }
                    for _ in 0..(*src_tag).inline_composite_list_element_count() {
                        copy_struct(
                            arena,
                            segment_id,
                            cap_table,
                            dst_element,
                            src_element,
                            (*src_tag).struct_data_size() as isize,
                            (*src_tag).struct_ptr_count() as isize,
                        );
                        src_element = src_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                        dst_element = dst_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                    }
                    (dst_ptr, dst, segment_id)
                }
            },
            WirePointerKind::Other => {
                panic!("Unchecked message contained an OTHER pointer.")
            }
            WirePointerKind::Far => {
                panic!("Unchecked message contained a far pointer.")
            }
        }
    }

    /// Makes `*dst` point at the same object as `*src`. Both must reside in
    /// the same message, but can be in different segments. The caller must
    /// zero out the source pointer afterward; `dst` is expected to already be
    /// zeroed.
    pub unsafe fn transfer_pointer(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src: *mut WirePointer,
    ) {
        assert!((*dst).is_null());
        if (*src).is_null() {
            ptr::write_bytes(dst, 0, 1);
        } else if (*src).is_positional() {
            transfer_pointer_split(
                arena,
                dst_segment_id,
                dst,
                src_segment_id,
                src,
                WirePointer::mut_target(src),
            );
        } else {
            ptr::copy_nonoverlapping(src, dst, 1);
        }
    }

    /// Like `transfer_pointer`, but with the source already split into a tag
    /// (`src_tag`) and a target (`src_ptr`).
    pub unsafe fn transfer_pointer_split(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src_tag: *mut WirePointer,
        src_ptr: *mut u8,
    ) {
        if dst_segment_id == src_segment_id {
            // Same segment, so create a direct pointer.
            if (*src_tag).kind() == WirePointerKind::Struct && (*src_tag).struct_word_size() == 0 {
                (*dst).set_kind_and_target_for_empty_struct();
            } else {
                (*dst).set_kind_and_target((*src_tag).kind(), src_ptr);
            }
            // We can just copy the upper 32 bits.
            ptr::copy_nonoverlapping(&(*src_tag).upper32bits, &mut (*dst).upper32bits, 1);
        } else {
            // Need to create a far pointer. Try to allocate the landing pad in
            // the same segment as the source, so that it doesn't need to be a
            // double-far.
            match arena.allocate(src_segment_id, 1) {
                None => {
                    // No space in the source segment, so we need a double-far.
                    let (far_segment_id, word_idx) = arena.allocate_anywhere(2);
                    let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(word_idx as isize);

                    let (src_seg_start, _seg_len) = arena.get_segment_mut(src_segment_id);

                    (*landing_pad).set_far(
                        false,
                        ((src_ptr as usize - src_seg_start as usize) / BYTES_PER_WORD) as u32,
                    );
                    (*landing_pad).set_far_segment_id(src_segment_id);

                    let landing_pad1 = landing_pad.offset(1);
                    (*landing_pad1).set_kind_with_zero_offset((*src_tag).kind());

                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad1).upper32bits,
                        1,
                    );

                    (*dst).set_far(true, word_idx);
                    (*dst).set_far_segment_id(far_segment_id);
                }
                Some(landing_pad_word) => {
                    // A simple landing pad is just a pointer.
                    let (seg_start, seg_len) = arena.get_segment_mut(src_segment_id);
                    assert!(landing_pad_word < seg_len);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(landing_pad_word as isize);
                    (*landing_pad).set_kind_and_target((*src_tag).kind(), src_ptr);
                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad).upper32bits,
                        1,
                    );

                    (*dst).set_far(false, landing_pad_word);
                    (*dst).set_far_segment_id(src_segment_id);
                }
            }
        }
    }

    #[inline]
    pub unsafe fn init_struct_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
    ) -> StructBuilder<'_> {
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            size.total(),
            WirePointerKind::Struct,
        );
        (*reff).set_struct_size(size);

        StructBuilder {
            arena,
            segment_id,
            cap_table,
            data: ptr as *mut _,
            pointers: ptr.offset((size.data as usize) as isize * BYTES_PER_WORD as isize) as *mut _,
            data_size: u32::from(size.data) * (BITS_PER_WORD as BitCount32),
            pointer_count: size.pointers,
        }
    }

    #[inline]
    pub unsafe fn get_writable_struct_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<StructBuilder<'a>> {
        let mut ref_target = WirePointer::mut_target(reff);

        if (*reff).is_null() {
            match default {
                None => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        cap_table,
                        reff,
                        d.as_ptr() as *const WirePointer,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    ref_target = new_ref_target;
                }
            }
        }

        let (old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, reff, ref_target, segment_id)?;
        if (*old_ref).kind() != WirePointerKind::Struct {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
            ));
        }

        let old_data_size = (*old_ref).struct_data_size();
        let old_pointer_count = (*old_ref).struct_ptr_count();
        let old_pointer_section: *mut WirePointer =
            old_ptr.offset(old_data_size as isize * BYTES_PER_WORD as isize) as *mut _;

        if old_data_size < size.data || old_pointer_count < size.pointers {
            // The space allocated for this struct is too small. Unlike with
            // readers, we can't just run with it and do bounds checks at
            // access time, because how would we handle writes? Instead, we
            // have to copy the struct to a new space now.
            let new_data_size = ::core::cmp::max(old_data_size, size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, size.pointers);
            let total_size =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;

            // Don't let allocate() zero out the object just yet.
            zero_pointer_and_fars(arena, segment_id, reff)?;

            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
            (*reff).set_struct_size_from_pieces(new_data_size, new_pointer_count);

            // Copy the data section.
            copy_nonoverlapping_check_zero(old_ptr, ptr, old_data_size as usize * BYTES_PER_WORD);

            // Transfer the pointer section.
            let new_pointer_section: *mut WirePointer =
                ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize) as *mut _;
            for i in 0..old_pointer_count as isize {
                transfer_pointer(
                    arena,
                    segment_id,
                    new_pointer_section.offset(i),
                    old_segment_id,
                    old_pointer_section.offset(i),
                );
            }

            ptr::write_bytes(
                old_ptr,
                0,
                (old_data_size as usize + old_pointer_count as usize) * BYTES_PER_WORD,
            );

            Ok(StructBuilder {
                arena,
                segment_id,
                cap_table,
                data: ptr as *mut _,
                pointers: new_pointer_section,
                data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                pointer_count: new_pointer_count,
            })
        } else {
            Ok(StructBuilder {
                arena,
                segment_id: old_segment_id,
                cap_table,
                data: old_ptr,
                pointers: old_pointer_section,
                data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                pointer_count: old_pointer_count,
            })
        }
    }

    #[inline]
    pub unsafe fn init_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: ElementSize,
    ) -> ListBuilder<'_> {
        assert!(
            element_size != InlineComposite,
            "Should have called initStructListPointer() instead"
        );

        let data_size = data_bits_per_element(element_size);
        let pointer_count = pointers_per_element(element_size);
        let step = data_size + pointer_count * BITS_PER_POINTER as u32;
        let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, word_count, WirePointerKind::List);

        (*reff).set_list_size_and_count(element_size, element_count);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr,
            step,
            element_count,
            element_size,
            struct_data_size: data_size,
            struct_pointer_count: pointer_count as u16,
        }
    }

    #[inline]
    pub unsafe fn init_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: StructSize,
    ) -> ListBuilder<'_> {
        let words_per_element = element_size.total();

        // Allocate the list, prefixed by a single WirePointer that serves as
        // the element tag.
        let word_count: WordCount32 = element_count * words_per_element;
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            POINTER_SIZE_IN_WORDS as u32 + word_count,
            WirePointerKind::List,
        );
        let ptr = ptr as *mut WirePointer;

        // Initialize the pointer and the tag.
        (*reff).set_list_inline_composite(word_count);
        (*ptr).set_kind_and_inline_composite_list_element_count(
            WirePointerKind::Struct,
            element_count,
        );
        (*ptr).set_struct_size(element_size);

        let ptr1 = ptr.add(POINTER_SIZE_IN_WORDS);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr: ptr1 as *mut _,
            step: words_per_element * BITS_PER_WORD as u32,
            element_count,
            element_size: ElementSize::InlineComposite,
            struct_data_size: u32::from(element_size.data) * (BITS_PER_WORD as u32),
            struct_pointer_count: element_size.pointers,
        }
    }
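
    // Inline-composite layout, informally: the allocated space begins with a
    // tag word shaped like a struct pointer, whose offset field carries the
    // element count, followed by the elements themselves:
    //
    //   [tag: count | data size | pointer count][element 0][element 1]...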

    #[inline]
    pub unsafe fn get_writable_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: ElementSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        assert!(
            element_size != InlineComposite,
            "Use get_writable_struct_list_pointer() for struct lists"
        );

        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        // We must verify that the pointer has the right size. Unlike in
        // get_writable_struct_list_pointer(), we never need to "upgrade" the
        // data, because this method is called only for non-struct lists, and
        // there is no allowed upgrade path *to* a non-struct list, only
        // *from* one.
        let (mut ptr, reff, segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*reff).list_element_size();

        if old_size == InlineComposite {
            // The existing element size is InlineComposite, which is at least
            // as big as any non-struct element. Since fields can only grow
            // when upgraded, the existing data must have been written with a
            // newer version of the protocol. We never need to upgrade the
            // data here, but we do need to validate that it is a valid
            // upgrade from what we expected.
            let tag: *const WirePointer = ptr as *const _;

            if (*tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            ptr = ptr.add(BYTES_PER_WORD);

            let data_size = (*tag).struct_data_size();
            let pointer_count = (*tag).struct_ptr_count();

            match element_size {
                Void => {} // Anything is a valid upgrade from Void.
                Bit => {
                    return Err(Error::from_kind(
                        ErrorKind::FoundStructListWhereBitListWasExpected,
                    ));
                }
                Byte | TwoBytes | FourBytes | EightBytes => {
                    if data_size < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                }
                Pointer => {
                    if pointer_count < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                    // Skip past the data section to the pointers.
                    ptr = ptr.offset(data_size as isize * BYTES_PER_WORD as isize);
                }
                InlineComposite => {
                    unreachable!()
                }
            }
            // OK, looks valid.
            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                element_count: (*tag).inline_composite_list_element_count(),
                element_size: ElementSize::InlineComposite,
                step: (*tag).struct_word_size() * BITS_PER_WORD as u32,
                struct_data_size: u32::from(data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: pointer_count,
            })
        } else {
            let data_size = data_bits_per_element(old_size);
            let pointer_count = pointers_per_element(old_size);

            if data_size < data_bits_per_element(element_size)
                || pointer_count < pointers_per_element(element_size)
            {
                return Err(Error::from_kind(
                    ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                ));
            }

            let step = data_size + pointer_count * BITS_PER_POINTER as u32;

            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                step,
                element_count: (*reff).list_element_count(),
                element_size: old_size,
                struct_data_size: data_size,
                struct_pointer_count: pointer_count as u16,
            })
        }
    }

    #[inline]
    pub unsafe fn get_writable_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: StructSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        // We must verify that the pointer has the right size and potentially
        // upgrade it if not.
        let (mut old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*old_ref).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*old_ref).list_element_size();

        if old_size == InlineComposite {
            // The existing list is InlineComposite; verify that the element
            // sizes match.
            let old_tag: *const WirePointer = old_ptr as *const _;
            old_ptr = old_ptr.add(BYTES_PER_WORD);
            if (*old_tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            let old_data_size = (*old_tag).struct_data_size();
            let old_pointer_count = (*old_tag).struct_ptr_count();
            let old_step =
                u32::from(old_data_size) + u32::from(old_pointer_count) * WORDS_PER_POINTER as u32;
            let element_count = (*old_tag).inline_composite_list_element_count();

            if old_data_size >= element_size.data && old_pointer_count >= element_size.pointers {
                // The old size is at least as big as required, so we can use
                // the list as-is.
                return Ok(ListBuilder {
                    arena,
                    segment_id: old_segment_id,
                    cap_table,
                    ptr: old_ptr as *mut _,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: old_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: old_pointer_count,
                });
            }

            // The structs in this list are smaller than expected, probably
            // written with an older version of the protocol. We need to make
            // a copy and expand them.
            let new_data_size = ::core::cmp::max(old_data_size, element_size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, element_size.pointers);
            let new_step =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;
            let total_size = new_step * element_count;

            // Don't let allocate() zero out the object just yet.
            zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

            let (mut new_ptr, new_ref, new_segment_id) = allocate(
                arena,
                orig_ref,
                orig_segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*new_ref).set_list_inline_composite(total_size);

            let new_tag: *mut WirePointer = new_ptr as *mut _;
            (*new_tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                element_count,
            );
            (*new_tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
            new_ptr = new_ptr.add(BYTES_PER_WORD);

            let mut src = old_ptr as *mut WirePointer;
            let mut dst = new_ptr as *mut WirePointer;
            for _ in 0..element_count {
                // Copy the data section.
                copy_nonoverlapping_check_zero(src, dst, old_data_size as usize);

                // Transfer the pointer section.
                let new_pointer_section = dst.offset(new_data_size as isize);
                let old_pointer_section = src.offset(old_data_size as isize);
                for jj in 0..(old_pointer_count as isize) {
                    transfer_pointer(
                        arena,
                        new_segment_id,
                        new_pointer_section.offset(jj),
                        old_segment_id,
                        old_pointer_section.offset(jj),
                    );
                }

                dst = dst.offset(new_step as isize);
                src = src.offset(old_step as isize);
            }

            // Zero out the old location, starting at the old tag word.
            ptr::write_bytes(
                old_ptr.offset(-(BYTES_PER_WORD as isize)),
                0,
                (u64::from(old_step) * u64::from(element_count)) as usize * BYTES_PER_WORD,
            );

            Ok(ListBuilder {
                arena,
                segment_id: new_segment_id,
                cap_table,
                ptr: new_ptr,
                element_count,
                element_size: ElementSize::InlineComposite,
                step: new_step * BITS_PER_WORD as u32,
                struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: new_pointer_count,
            })
        } else {
            // We're upgrading from a non-struct list.
            let old_data_size = data_bits_per_element(old_size);
            let old_pointer_count = pointers_per_element(old_size);
            let old_step = old_data_size + old_pointer_count * BITS_PER_POINTER as u32;
            let element_count = (*old_ref).list_element_count();

            if old_size == ElementSize::Void {
                // A list of Void contains no data, so we can simply allocate
                // a fresh struct list.
                Ok(init_struct_list_pointer(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    cap_table,
                    element_count,
                    element_size,
                ))
            } else {
                // Upgrading to an inline composite list.
                if old_size == ElementSize::Bit {
                    return Err(Error::from_kind(
                        ErrorKind::FoundBitListWhereStructListWasExpected,
                    ));
                }

                let mut new_data_size = element_size.data;
                let mut new_pointer_count = element_size.pointers;

                if old_size == ElementSize::Pointer {
                    new_pointer_count = ::core::cmp::max(new_pointer_count, 1);
                } else {
                    // The old list contains data elements, so we need at
                    // least one word of data.
                    new_data_size = ::core::cmp::max(new_data_size, 1);
                }

                let new_step = u32::from(new_data_size)
                    + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;
                let total_words = element_count * new_step;

                // Don't let allocate() zero out the object just yet.
                zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

                let (mut new_ptr, new_ref, new_segment_id) = allocate(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    total_words + POINTER_SIZE_IN_WORDS as u32,
                    WirePointerKind::List,
                );
                (*new_ref).set_list_inline_composite(total_words);

                let tag: *mut WirePointer = new_ptr as *mut _;
                (*tag).set_kind_and_inline_composite_list_element_count(
                    WirePointerKind::Struct,
                    element_count,
                );
                (*tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
                new_ptr = new_ptr.add(BYTES_PER_WORD);

                if old_size == ElementSize::Pointer {
                    let mut dst = new_ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize);
                    let mut src: *mut WirePointer = old_ptr as *mut _;
                    for _ in 0..element_count {
                        transfer_pointer(arena, new_segment_id, dst as *mut _, old_segment_id, src);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                        src = src.offset(1);
                    }
                } else {
                    let mut dst = new_ptr;
                    let mut src: *mut u8 = old_ptr;
                    let old_byte_step = old_data_size / BITS_PER_BYTE as u32;
                    for _ in 0..element_count {
                        copy_nonoverlapping_check_zero(src, dst, old_byte_step as usize);
                        src = src.offset(old_byte_step as isize);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                    }
                }

                // Zero out the old location.
                ptr::write_bytes(
                    old_ptr,
                    0,
                    round_bits_up_to_bytes(u64::from(old_step) * u64::from(element_count)) as usize,
                );

                Ok(ListBuilder {
                    arena,
                    segment_id: new_segment_id,
                    cap_table,
                    ptr: new_ptr,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: new_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: new_pointer_count,
                })
            }
        }
    }

    #[inline]
    pub unsafe fn init_text_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        size: ByteCount32,
    ) -> SegmentAnd<text::Builder<'_>> {
        // The byte list must include a NUL terminator.
        let byte_size = size + 1;

        // Allocate the space.
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            round_bytes_up_to_words(byte_size),
            WirePointerKind::List,
        );

        // Initialize the pointer.
        (*reff).set_list_size_and_count(Byte, byte_size);

        SegmentAnd {
            segment_id,
            value: text::Builder::new(slice::from_raw_parts_mut(ptr, size as usize)),
        }
    }

    #[inline]
    pub unsafe fn set_text_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        value: crate::text::Reader<'_>,
    ) -> SegmentAnd<text::Builder<'a>> {
        let value_bytes = value.as_bytes();
        let mut allocation = init_text_pointer(arena, reff, segment_id, value_bytes.len() as u32);
        allocation
            .value
            .reborrow()
            .as_bytes_mut()
            .copy_from_slice(value_bytes);
        allocation
    }

    #[inline]
    pub unsafe fn get_writable_text_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<text::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(text::Builder::new(&mut [])),
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        let count = (*reff).list_element_count();
        if count == 0 || *ptr.offset((count - 1) as isize) != 0 {
            return Err(Error::from_kind(ErrorKind::TextBlobMissingNULTerminator));
        }

        // Subtract 1 from the size for the NUL terminator.
        Ok(text::Builder::with_pos(
            slice::from_raw_parts_mut(ptr, (count - 1) as usize),
            (count - 1) as usize,
        ))
    }

    #[inline]
    pub unsafe fn init_data_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        size: ByteCount32,
    ) -> SegmentAnd<data::Builder<'_>> {
        // Allocate the space.
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            round_bytes_up_to_words(size),
            WirePointerKind::List,
        );

        // Initialize the pointer.
        (*reff).set_list_size_and_count(Byte, size);

        SegmentAnd {
            segment_id,
            value: data::builder_from_raw_parts(ptr, size),
        }
    }

    #[inline]
    pub unsafe fn set_data_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        value: &[u8],
    ) -> SegmentAnd<data::Builder<'a>> {
        let allocation = init_data_pointer(arena, reff, segment_id, value.len() as u32);
        copy_nonoverlapping_check_zero(value.as_ptr(), allocation.value.as_mut_ptr(), value.len());
        allocation
    }

    #[inline]
    pub unsafe fn get_writable_data_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<data::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(&mut []),
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        Ok(data::builder_from_raw_parts(
            ptr,
            (*reff).list_element_count(),
        ))
    }

    pub unsafe fn set_struct_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: StructReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let mut data_size: ByteCount32 = round_bits_up_to_bytes(u64::from(value.data_size));
        let mut ptr_count = value.pointer_count;

        if canonicalize {
            // StructReaders should not have bit widths other than 1, but
            // let's be safe.
            if !(value.data_size == 1 || value.data_size % BITS_PER_BYTE as u32 == 0) {
                return Err(Error::from_kind(
                    ErrorKind::StructReaderHadBitwidthOtherThan1,
                ));
            }

            if value.data_size == 1 {
                if !value.get_bool_field(0) {
                    data_size = 0;
                }
            } else {
                // Truncate the data section by dropping trailing all-zero
                // words.
                'chop: while data_size != 0 {
                    let end = data_size;
                    let mut window = data_size % BYTES_PER_WORD as u32;
                    if window == 0 {
                        window = BYTES_PER_WORD as u32;
                    }
                    let start = end - window;
                    let last_word = &value.get_data_section_as_blob()[start as usize..end as usize];
                    if last_word == [0; 8] {
                        data_size -= window;
                    } else {
                        break 'chop;
                    }
                }
            }

            // Truncate the pointer section by dropping trailing null pointers.
            while ptr_count != 0 && value.get_pointer_field(ptr_count as usize - 1).is_null() {
                ptr_count -= 1;
            }
        }

        let data_words = round_bytes_up_to_words(data_size);
        let total_size: WordCount32 = data_words + u32::from(ptr_count) * WORDS_PER_POINTER as u32;

        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
        (*reff).set_struct_size_from_pieces(data_words as u16, ptr_count);

        if value.data_size == 1 {
            // The data section is a single bool.
            if data_size != 0 {
                *ptr = u8::from(value.get_bool_field(0))
            }
        } else {
            copy_nonoverlapping_check_zero::<u8>(value.data, ptr, data_size as usize);
        }

        let pointer_section: *mut WirePointer =
            ptr.offset(data_words as isize * BYTES_PER_WORD as isize) as *mut _;
        for i in 0..ptr_count as isize {
            copy_pointer(
                arena,
                segment_id,
                cap_table,
                pointer_section.offset(i),
                value.arena,
                value.segment_id,
                value.cap_table,
                value.pointers.offset(i),
                value.nesting_limit,
                canonicalize,
            )?;
        }

        Ok(SegmentAnd {
            segment_id,
            value: ptr,
        })
    }

    #[cfg(feature = "alloc")]
    pub unsafe fn set_capability_pointer(
        _arena: &mut dyn BuilderArena,
        _segment_id: u32,
        mut cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        cap: alloc::boxed::Box<dyn ClientHook>,
    ) {
        (*reff).set_cap(cap_table.inject_cap(cap) as u32);
    }

    pub unsafe fn set_list_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: ListReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let total_size =
            round_bits_up_to_words(u64::from(value.element_count) * u64::from(value.step));

        if value.element_size != ElementSize::InlineComposite {
            // List of non-structs.
            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::List);

            if value.struct_pointer_count == 1 {
                // List of pointers.
                (*reff).set_list_size_and_count(Pointer, value.element_count);
                for i in 0..value.element_count as isize {
                    copy_pointer(
                        arena,
                        segment_id,
                        cap_table,
                        (ptr as *mut WirePointer).offset(i),
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        (value.ptr as *const WirePointer).offset(i),
                        value.nesting_limit,
                        canonicalize,
                    )?;
                }
            } else {
                // List of data.
                let element_size = match value.step {
                    0 => Void,
                    1 => Bit,
                    8 => Byte,
                    16 => TwoBytes,
                    32 => FourBytes,
                    64 => EightBytes,
                    _ => {
                        panic!("invalid list step size: {}", value.step)
                    }
                };

                (*reff).set_list_size_and_count(element_size, value.element_count);

                // Be careful to copy only the bytes the list actually covers:
                // a bit list may end partway through a byte, in which case the
                // final partial byte is masked below.
                let whole_byte_size =
                    u64::from(value.element_count) * u64::from(value.step) / BITS_PER_BYTE as u64;
                copy_nonoverlapping_check_zero(value.ptr, ptr, whole_byte_size as usize);

                let leftover_bits =
                    u64::from(value.element_count) * u64::from(value.step) % BITS_PER_BYTE as u64;
                if leftover_bits > 0 {
                    let mask: u8 = (1 << leftover_bits as u8) - 1;

                    *ptr.offset(whole_byte_size as isize) =
                        mask & (*value.ptr.offset(whole_byte_size as isize))
                }
            }

            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        } else {
            // List of structs.
            let decl_data_size = value.struct_data_size / BITS_PER_WORD as u32;
            let decl_pointer_count = value.struct_pointer_count;

            let mut data_size = 0;
            let mut ptr_count = 0;
            let mut total_size = total_size;

            if canonicalize {
                // For canonicalization, find the smallest per-element data
                // size and pointer count that preserve every set field,
                // applied uniformly across the whole list.
                for ec in 0..value.element_count {
                    let se = value.get_struct_element(ec);
                    let mut local_data_size = decl_data_size;
                    'data_chop: while local_data_size != 0 {
                        let end = local_data_size * BYTES_PER_WORD as u32;
                        let window = BYTES_PER_WORD as u32;
                        let start = end - window;
                        let last_word =
                            &se.get_data_section_as_blob()[start as usize..end as usize];
                        if last_word != [0; 8] {
                            break 'data_chop;
                        } else {
                            local_data_size -= 1;
                        }
                    }
                    if local_data_size > data_size {
                        data_size = local_data_size;
                    }
                    let mut local_ptr_count = decl_pointer_count;
                    while local_ptr_count != 0
                        && se.get_pointer_field(local_ptr_count as usize - 1).is_null()
                    {
                        local_ptr_count -= 1;
                    }
                    if local_ptr_count > ptr_count {
                        ptr_count = local_ptr_count;
                    }
                }
                total_size = (data_size + u32::from(ptr_count)) * value.element_count;
            } else {
                data_size = decl_data_size;
                ptr_count = decl_pointer_count;
            }

            let (ptr, reff, segment_id) = allocate(
                arena,
                reff,
                segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*reff).set_list_inline_composite(total_size);

            let tag: *mut WirePointer = ptr as *mut _;
            (*tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                value.element_count,
            );
            (*tag).set_struct_size_from_pieces(data_size as u16, ptr_count);
            let mut dst = ptr.add(BYTES_PER_WORD);

            let mut src: *const u8 = value.ptr;
            for _ in 0..value.element_count {
                copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);
                dst = dst.offset(data_size as isize * BYTES_PER_WORD as isize);
                src = src.offset(decl_data_size as isize * BYTES_PER_WORD as isize);

                for _ in 0..ptr_count {
                    copy_pointer(
                        arena,
                        segment_id,
                        cap_table,
                        dst as *mut _,
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        src as *const WirePointer,
                        value.nesting_limit,
                        canonicalize,
                    )?;
                    dst = dst.add(BYTES_PER_WORD);
                    src = src.add(BYTES_PER_WORD);
                }

                src =
                    src.offset((decl_pointer_count - ptr_count) as isize * BYTES_PER_WORD as isize);
            }
            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        }
    }
2150
2151 #[allow(clippy::too_many_arguments)]
2152 pub unsafe fn copy_pointer(
2153 dst_arena: &mut dyn BuilderArena,
2154 dst_segment_id: u32,
2155 dst_cap_table: CapTableBuilder,
2156 dst: *mut WirePointer,
2157 src_arena: &dyn ReaderArena,
2158 src_segment_id: u32,
2159 src_cap_table: CapTableReader,
2160 src: *const WirePointer,
2161 nesting_limit: i32,
2162 canonicalize: bool,
2163 ) -> Result<SegmentAnd<*mut u8>> {
2164 if (*src).is_null() {
2165 ptr::write_bytes(dst, 0, 1);
2166 return Ok(SegmentAnd {
2167 segment_id: dst_segment_id,
2168 value: ptr::null_mut(),
2169 });
2170 }
2171
2172 let (mut ptr, src, src_segment_id) = follow_fars(src_arena, src, src_segment_id)?;
2173
2174 match (*src).kind() {
2175 WirePointerKind::Struct => {
2176 if nesting_limit <= 0 {
2177 return Err(Error::from_kind(
2178 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2179 ));
2180 }
2181
2182 bounds_check(
2183 src_arena,
2184 src_segment_id,
2185 ptr,
2186 (*src).struct_word_size() as usize,
2187 WirePointerKind::Struct,
2188 )?;
2189
2190 set_struct_pointer(
2191 dst_arena,
2192 dst_segment_id,
2193 dst_cap_table,
2194 dst,
2195 StructReader {
2196 arena: src_arena,
2197 segment_id: src_segment_id,
2198 cap_table: src_cap_table,
2199 data: ptr,
2200 pointers: ptr
2201 .offset((*src).struct_data_size() as isize * BYTES_PER_WORD as isize)
2202 as *const _,
2203 data_size: u32::from((*src).struct_data_size()) * BITS_PER_WORD as u32,
2204 pointer_count: (*src).struct_ptr_count(),
2205 nesting_limit: nesting_limit - 1,
2206 },
2207 canonicalize,
2208 )
2209 }
2210 WirePointerKind::List => {
2211 let element_size = (*src).list_element_size();
2212 if nesting_limit <= 0 {
2213 return Err(Error::from_kind(
2214 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2215 ));
2216 }
2217
2218 if element_size == InlineComposite {
2219 let word_count = (*src).list_inline_composite_word_count();
2220 let tag: *const WirePointer = ptr as *const _;
2221 ptr = ptr.add(BYTES_PER_WORD);
2222
2223 bounds_check(
2224 src_arena,
2225 src_segment_id,
2226 ptr.offset(-(BYTES_PER_WORD as isize)),
2227 word_count as usize + 1,
2228 WirePointerKind::List,
2229 )?;
2230
2231 if (*tag).kind() != WirePointerKind::Struct {
2232 return Err(Error::from_kind(
2233 ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
2234 ));
2235 }
2236
2237 let element_count = (*tag).inline_composite_list_element_count();
2238 let words_per_element = (*tag).struct_word_size();
2239
2240 if u64::from(words_per_element) * u64::from(element_count)
2241 > u64::from(word_count)
2242 {
2243 return Err(Error::from_kind(
2244 ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
2245 ));
2246 }
2247
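                    // Zero-sized elements carry no data, so charge the read limiter
                    // per element to defend against amplification attacks.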
2248 if words_per_element == 0 {
2249 amplified_read(src_arena, u64::from(element_count))?;
2252 }
2253
2254 set_list_pointer(
2255 dst_arena,
2256 dst_segment_id,
2257 dst_cap_table,
2258 dst,
2259 ListReader {
2260 arena: src_arena,
2261 segment_id: src_segment_id,
2262 cap_table: src_cap_table,
2263 ptr: ptr as *const _,
2264 element_count,
2265 element_size,
2266 step: words_per_element * BITS_PER_WORD as u32,
2267 struct_data_size: u32::from((*tag).struct_data_size())
2268 * BITS_PER_WORD as u32,
2269 struct_pointer_count: (*tag).struct_ptr_count(),
2270 nesting_limit: nesting_limit - 1,
2271 },
2272 canonicalize,
2273 )
2274 } else {
2275 let data_size = data_bits_per_element(element_size);
2276 let pointer_count = pointers_per_element(element_size);
2277 let step = data_size + pointer_count * BITS_PER_POINTER as u32;
2278 let element_count = (*src).list_element_count();
2279 let word_count =
2280 round_bits_up_to_words(u64::from(element_count) * u64::from(step));
2281
2282 bounds_check(
2283 src_arena,
2284 src_segment_id,
2285 ptr,
2286 word_count as usize,
2287 WirePointerKind::List,
2288 )?;
2289
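                    // Void elements are zero-sized; charge the read limiter per
                    // element to defend against amplification attacks.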
2290 if element_size == Void {
2291 amplified_read(src_arena, u64::from(element_count))?;
2294 }
2295
2296 set_list_pointer(
2297 dst_arena,
2298 dst_segment_id,
2299 dst_cap_table,
2300 dst,
2301 ListReader {
2302 arena: src_arena,
2303 segment_id: src_segment_id,
2304 cap_table: src_cap_table,
2305 ptr: ptr as *const _,
2306 element_count,
2307 element_size,
2308 step,
2309 struct_data_size: data_size,
2310 struct_pointer_count: pointer_count as u16,
2311 nesting_limit: nesting_limit - 1,
2312 },
2313 canonicalize,
2314 )
2315 }
2316 }
2317 WirePointerKind::Far => Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer)),
2318 WirePointerKind::Other => {
2319 if !(*src).is_capability() {
2320 return Err(Error::from_kind(ErrorKind::UnknownPointerType));
2321 }
2322 if canonicalize {
2323 return Err(Error::from_kind(
2324 ErrorKind::CannotCreateACanonicalMessageWithACapability,
2325 ));
2326 }
2327 #[cfg(feature = "alloc")]
2328 match src_cap_table.extract_cap((*src).cap_index() as usize) {
2329 Some(cap) => {
2330 set_capability_pointer(dst_arena, dst_segment_id, dst_cap_table, dst, cap);
2331 Ok(SegmentAnd {
2332 segment_id: dst_segment_id,
2333 value: ptr::null_mut(),
2334 })
2335 }
2336 None => Err(Error::from_kind(
2337 ErrorKind::MessageContainsInvalidCapabilityPointer,
2338 )),
2339 }
2340 #[cfg(not(feature = "alloc"))]
2341 return Err(Error::from_kind(ErrorKind::UnknownPointerType));
2342 }
2343 }
2344 }
2345
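    /// Resolves `reff` to a struct, substituting `default` (a struct encoded as
    /// raw words) when the pointer is null, following far pointers, and
    /// bounds-checking the struct's words before constructing the reader.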
2346 #[inline]
2347 pub unsafe fn read_struct_pointer<'a>(
2348 mut arena: &'a dyn ReaderArena,
2349 mut segment_id: u32,
2350 cap_table: CapTableReader,
2351 mut reff: *const WirePointer,
2352 default: Option<&'a [crate::Word]>,
2353 nesting_limit: i32,
2354 ) -> Result<StructReader<'a>> {
2355 if (*reff).is_null() {
2356 match default {
2357 None => return Ok(StructReader::new_default()),
2358 Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
2359 return Ok(StructReader::new_default())
2360 }
2361 Some(d) => {
2362 reff = d.as_ptr() as *const _;
2363 arena = &super::NULL_ARENA;
2364 segment_id = 0;
2365 }
2366 }
2367 }
2368
2369 if nesting_limit <= 0 {
2370 return Err(Error::from_kind(
2371 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2372 ));
2373 }
2374
2375 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2376
2377 let data_size_words = (*reff).struct_data_size();
2378
2379 if (*reff).kind() != WirePointerKind::Struct {
2380 return Err(Error::from_kind(
2381 ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
2382 ));
2383 }
2384
2385 bounds_check(
2386 arena,
2387 segment_id,
2388 ptr,
2389 (*reff).struct_word_size() as usize,
2390 WirePointerKind::Struct,
2391 )?;
2392
2393 Ok(StructReader {
2394 arena,
2395 segment_id,
2396 cap_table,
2397 data: ptr,
2398 pointers: ptr.offset(data_size_words as isize * BYTES_PER_WORD as isize) as *const _,
2399 data_size: u32::from(data_size_words) * BITS_PER_WORD as BitCount32,
2400 pointer_count: (*reff).struct_ptr_count(),
2401 nesting_limit: nesting_limit - 1,
2402 })
2403 }
2404
2405 #[inline]
2406 #[cfg(feature = "alloc")]
2407 pub unsafe fn read_capability_pointer(
2408 _arena: &dyn ReaderArena,
2409 _segment_id: u32,
2410 cap_table: CapTableReader,
2411 reff: *const WirePointer,
2412 _nesting_limit: i32,
2413 ) -> Result<alloc::boxed::Box<dyn ClientHook>> {
2414 if (*reff).is_null() {
2415 Err(Error::from_kind(
2416 ErrorKind::MessageContainsNullCapabilityPointer,
2417 ))
2418 } else if !(*reff).is_capability() {
2419 Err(Error::from_kind(
2420 ErrorKind::MessageContainsNonCapabilityPointerWhereCapabilityPointerWasExpected,
2421 ))
2422 } else {
2423 let n = (*reff).cap_index() as usize;
2424 match cap_table.extract_cap(n) {
2425 Some(client_hook) => Ok(client_hook),
2426 None => Err(Error::from_kind(
2427 ErrorKind::MessageContainsInvalidCapabilityPointer,
2428 )),
2429 }
2430 }
2431 }
2432
2433 #[inline]
2434 pub unsafe fn read_list_pointer(
2435 mut arena: &dyn ReaderArena,
2436 mut segment_id: u32,
2437 cap_table: CapTableReader,
2438 mut reff: *const WirePointer,
2439 default_value: *const u8,
2440 expected_element_size: Option<ElementSize>,
2441 nesting_limit: i32,
2442 ) -> Result<ListReader<'_>> {
2443 if (*reff).is_null() {
2444 if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
2445 return Ok(ListReader::new_default());
2446 }
2447 reff = default_value as *const _;
2448 arena = &super::NULL_ARENA;
2449 segment_id = 0;
2450 }
2451
2452 if nesting_limit <= 0 {
2453 return Err(Error::from_kind(ErrorKind::NestingLimitExceeded));
2454 }
2455 let (mut ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2456
2457 if (*reff).kind() != WirePointerKind::List {
2458 return Err(Error::from_kind(
2459 ErrorKind::MessageContainsNonListPointerWhereListPointerWasExpected,
2460 ));
2461 }
2462
2463 let element_size = (*reff).list_element_size();
2464 match element_size {
2465 InlineComposite => {
2466 let word_count = (*reff).list_inline_composite_word_count();
2467
2468 let tag: *const WirePointer = ptr as *const WirePointer;
2469
2470 ptr = ptr.add(BYTES_PER_WORD);
2471
2472 bounds_check(
2473 arena,
2474 segment_id,
2475 ptr.offset(-(BYTES_PER_WORD as isize)),
2476 word_count as usize + 1,
2477 WirePointerKind::List,
2478 )?;
2479
2480 if (*tag).kind() != WirePointerKind::Struct {
2481 return Err(Error::from_kind(
2482 ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
2483 ));
2484 }
2485
2486 let size = (*tag).inline_composite_list_element_count();
2487 let data_size = (*tag).struct_data_size();
2488 let ptr_count = (*tag).struct_ptr_count();
2489 let words_per_element = (*tag).struct_word_size();
2490
2491 if u64::from(size) * u64::from(words_per_element) > u64::from(word_count) {
2492 return Err(Error::from_kind(
2493 ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
2494 ));
2495 }
2496
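                // Zero-sized elements carry no data, so charge the read limiter
                // per element to defend against amplification attacks.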
2497 if words_per_element == 0 {
2498 amplified_read(arena, u64::from(size))?;
2501 }
2502
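                // The caller may have expected a different element size; verify
                // that these struct elements can stand in for that encoding.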
2503 match expected_element_size {
2510 None | Some(Void | InlineComposite) => (),
2511 Some(Bit) => {
2512 return Err(Error::from_kind(
2513 ErrorKind::FoundStructListWhereBitListWasExpected,
2514 ));
2515 }
2516 Some(Byte | TwoBytes | FourBytes | EightBytes) => {
2517 if data_size == 0 {
2518 return Err(Error::from_kind(
2519 ErrorKind::ExpectedAPrimitiveListButGotAListOfPointerOnlyStructs,
2520 ));
2521 }
2522 }
2523 Some(Pointer) => {
2524 if ptr_count == 0 {
2525 return Err(Error::from_kind(
2526 ErrorKind::ExpectedAPointerListButGotAListOfDataOnlyStructs,
2527 ));
2528 }
2529 }
2530 }
2531
2532 Ok(ListReader {
2533 arena,
2534 segment_id,
2535 cap_table,
2536 ptr: ptr as *const _,
2537 element_count: size,
2538 element_size,
2539 step: words_per_element * BITS_PER_WORD as u32,
2540 struct_data_size: u32::from(data_size) * (BITS_PER_WORD as u32),
2541 struct_pointer_count: ptr_count,
2542 nesting_limit: nesting_limit - 1,
2543 })
2544 }
2545 _ => {
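                // A primitive or pointer list. Derive its layout from the element
                // size so it can also be read as a list of single-field structs.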
2546 let data_size = data_bits_per_element((*reff).list_element_size());
2550 let pointer_count = pointers_per_element((*reff).list_element_size());
2551 let element_count = (*reff).list_element_count();
2552 let step = data_size + pointer_count * BITS_PER_POINTER as u32;
2553
2554 let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
2555 bounds_check(
2556 arena,
2557 segment_id,
2558 ptr,
2559 word_count as usize,
2560 WirePointerKind::List,
2561 )?;
2562
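                // Void elements are zero-sized; charge the read limiter per
                // element to defend against amplification attacks.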
2563 if element_size == Void {
2564 amplified_read(arena, u64::from(element_count))?;
2567 }
2568
2569 if let Some(expected_element_size) = expected_element_size {
2570 if element_size == ElementSize::Bit && expected_element_size != ElementSize::Bit
2571 {
2572 return Err(Error::from_kind(
2573 ErrorKind::FoundBitListWhereStructListWasExpected,
2574 ));
2575 }
2576
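                // Verify that the actual elements are at least as large as the
                // expected type. (When InlineComposite is expected, the expected
                // sizes are zero, and bounds checks happen at field-access time.)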
2577 let expected_data_bits_per_element =
2583 data_bits_per_element(expected_element_size);
2584 let expected_pointers_per_element = pointers_per_element(expected_element_size);
2585
2586 if expected_data_bits_per_element > data_size
2587 || expected_pointers_per_element > pointer_count
2588 {
2589 return Err(Error::from_kind(
2590 ErrorKind::MessageContainsListWithIncompatibleElementType,
2591 ));
2592 }
2593 }
2594
2595 Ok(ListReader {
2596 arena,
2597 segment_id,
2598 cap_table,
2599 ptr: ptr as *const _,
2600 element_count,
2601 element_size,
2602 step,
2603 struct_data_size: data_size,
2604 struct_pointer_count: pointer_count as u16,
2605 nesting_limit: nesting_limit - 1,
2606 })
2607 }
2608 }
2609 }
2610
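    /// Resolves `reff` to a text blob: a byte list whose final byte must be a
    /// NUL terminator. The returned reader spans `size - 1` bytes, excluding
    /// that terminator.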
2611 #[inline]
2612 pub unsafe fn read_text_pointer<'a>(
2613 mut arena: &'a dyn ReaderArena,
2614 mut segment_id: u32,
2615 mut reff: *const WirePointer,
2616 default: Option<&[crate::Word]>,
2617 ) -> Result<text::Reader<'a>> {
2618 if (*reff).is_null() {
2619 match default {
2620 None => return Ok("".into()),
2621 Some(d) => {
2622 reff = d.as_ptr() as *const WirePointer;
2623 arena = &super::NULL_ARENA;
2624 segment_id = 0;
2625 }
2626 }
2627 }
2628
2629 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2630 let size = (*reff).list_element_count();
2631
2632 if (*reff).kind() != WirePointerKind::List {
2633 return Err(Error::from_kind(
2634 ErrorKind::MessageContainsNonListPointerWhereTextWasExpected,
2635 ));
2636 }
2637
2638 if (*reff).list_element_size() != Byte {
2639 return Err(Error::from_kind(
2640 ErrorKind::MessageContainsListPointerOfNonBytesWhereTextWasExpected,
2641 ));
2642 }
2643
2644 bounds_check(
2645 arena,
2646 segment_id,
2647 ptr,
2648 round_bytes_up_to_words(size) as usize,
2649 WirePointerKind::List,
2650 )?;
2651
2652 if size == 0 {
2653 return Err(Error::from_kind(
2654 ErrorKind::MessageContainsTextThatIsNotNULTerminated,
2655 ));
2656 }
2657
2658 let str_ptr = ptr;
2659
2660 if (*str_ptr.offset((size - 1) as isize)) != 0u8 {
2661 return Err(Error::from_kind(
2662 ErrorKind::MessageContainsTextThatIsNotNULTerminated,
2663 ));
2664 }
2665
2666 Ok(text::Reader(slice::from_raw_parts(
2667 str_ptr,
2668 size as usize - 1,
2669 )))
2670 }
2671
2672 #[inline]
2673 pub unsafe fn read_data_pointer<'a>(
2674 mut arena: &'a dyn ReaderArena,
2675 mut segment_id: u32,
2676 mut reff: *const WirePointer,
2677 default: Option<&'a [crate::Word]>,
2678 ) -> Result<data::Reader<'a>> {
2679 if (*reff).is_null() {
2680 match default {
2681 None => return Ok(&[]),
2682 Some(d) => {
2683 reff = d.as_ptr() as *const WirePointer;
2684 arena = &super::NULL_ARENA;
2685 segment_id = 0;
2686 }
2687 }
2688 }
2689
2690 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2691
2692 let size: u32 = (*reff).list_element_count();
2693
2694 if (*reff).kind() != WirePointerKind::List {
2695 return Err(Error::from_kind(
2696 ErrorKind::MessageContainsNonListPointerWhereDataWasExpected,
2697 ));
2698 }
2699
2700 if (*reff).list_element_size() != Byte {
2701 return Err(Error::from_kind(
2702 ErrorKind::MessageContainsListPointerOfNonBytesWhereDataWasExpected,
2703 ));
2704 }
2705
2706 bounds_check(
2707 arena,
2708 segment_id,
2709 ptr,
2710 round_bytes_up_to_words(size) as usize,
2711 WirePointerKind::List,
2712 )?;
2713
2714 Ok(data::reader_from_raw_parts(ptr as *const _, size))
2715 }
2716}
2717
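/// A single zeroed word. `zero_pointer()` reinterprets it as a null
/// `WirePointer` for readers that have no backing message.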
2718static ZERO: u64 = 0;
2719fn zero_pointer() -> *const WirePointer {
2720 &ZERO as *const _ as *const _
2721}
2722
2723static NULL_ARENA: NullArena = NullArena;
2724
2725#[cfg(feature = "alloc")]
2726pub type CapTable = alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>>;
2727
2728#[cfg(not(feature = "alloc"))]
2729pub struct CapTable;
2730
2731#[derive(Copy, Clone)]
2732pub enum CapTableReader {
2733 Plain(*const CapTable),
2737}
2738
2739impl Default for CapTableReader {
2740 fn default() -> Self {
2741 CapTableReader::Plain(ptr::null())
2742 }
2743}
2744
2745#[cfg(feature = "alloc")]
2746impl CapTableReader {
2747 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2748 match *self {
2749 Self::Plain(hooks) => {
2750 if hooks.is_null() {
2751 return None;
2752 }
2753 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2754 unsafe { &*hooks };
2755 if index >= hooks.len() {
2756 None
2757 } else {
2758 hooks[index].as_ref().map(|hook| hook.add_ref())
2759 }
2760 }
2761 }
2762 }
2763}
2764
2765#[derive(Copy, Clone)]
2766pub enum CapTableBuilder {
2767 Plain(*mut CapTable),
2771}
2772
2773impl Default for CapTableBuilder {
2774 fn default() -> Self {
2775 CapTableBuilder::Plain(ptr::null_mut())
2776 }
2777}
2778
2779impl CapTableBuilder {
2780 pub fn into_reader(self) -> CapTableReader {
2781 match self {
2782 Self::Plain(hooks) => CapTableReader::Plain(hooks),
2783 }
2784 }
2785
2786 #[cfg(feature = "alloc")]
2787 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2788 match *self {
2789 Self::Plain(hooks) => {
2790 if hooks.is_null() {
2791 return None;
2792 }
2793 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2794 unsafe { &*hooks };
2795 if index >= hooks.len() {
2796 None
2797 } else {
2798 hooks[index].as_ref().map(|hook| hook.add_ref())
2799 }
2800 }
2801 }
2802 }
2803
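    /// Appends `cap` to the table, returning the index that a capability
    /// pointer on the wire should carry in order to refer to it.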
2804 #[cfg(feature = "alloc")]
2805 pub fn inject_cap(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) -> usize {
2806 match *self {
2807 Self::Plain(hooks) => {
2808 if hooks.is_null() {
2809 panic!(
2810 "Called inject_cap() on a null capability table. You need \
2811 to call imbue_mut() on this message before adding capabilities."
2812 );
2813 }
2814 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2815 unsafe { &mut *hooks };
2816 hooks.push(Some(cap));
2817 hooks.len() - 1
2818 }
2819 }
2820 }
2821
2822 #[cfg(feature = "alloc")]
2823 pub fn drop_cap(&mut self, index: usize) {
2824 match *self {
2825 Self::Plain(hooks) => {
2826 if hooks.is_null() {
2827 panic!(
2828 "Called drop_cap() on a null capability table. You need \
2829 to call imbue_mut() on this message before adding capabilities."
2830 );
2831 }
2832 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2833 unsafe { &mut *hooks };
2834 if index < hooks.len() {
2835 hooks[index] = None;
2836 }
2837 }
2838 }
2839 }
2840}
2841
2842#[derive(Clone, Copy)]
2843pub struct PointerReader<'a> {
2844 arena: &'a dyn ReaderArena,
2845 cap_table: CapTableReader,
2846 pointer: *const WirePointer,
2847 segment_id: u32,
2848 nesting_limit: i32,
2849}
2850
2851impl<'a> PointerReader<'a> {
2852 pub fn new_default<'b>() -> PointerReader<'b> {
2853 PointerReader {
2854 arena: &NULL_ARENA,
2855 segment_id: 0,
2856 cap_table: Default::default(),
2857 pointer: ptr::null(),
2858 nesting_limit: 0x7fffffff,
2859 }
2860 }
2861
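    /// Interprets `location` as the root pointer of segment `segment_id`,
    /// first bounds-checking that one pointer-width of space exists there.
    /// A minimal usage sketch; the `arena` and `root_location` values are
    /// assumed to come from an already-deserialized message (marked `ignore`
    /// because that setup is elided):
    ///
    /// ```ignore
    /// let reader = unsafe { PointerReader::get_root(arena, 0, root_location, 64)? };
    /// let root = reader.get_struct(None)?;
    /// ```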
2862 pub unsafe fn get_root(
2863 arena: &'a dyn ReaderArena,
2864 segment_id: u32,
2865 location: *const u8,
2866 nesting_limit: i32,
2867 ) -> Result<Self> {
2868 wire_helpers::bounds_check(
2869 arena,
2870 segment_id,
2871 location as *const _,
2872 POINTER_SIZE_IN_WORDS,
2873 WirePointerKind::Struct,
2874 )?;
2875
2876 Ok(PointerReader {
2877 arena,
2878 segment_id,
2879 cap_table: Default::default(),
2880 pointer: location as *const _,
2881 nesting_limit,
2882 })
2883 }
2884
2885 pub fn reborrow(&self) -> PointerReader<'_> {
2886 PointerReader {
2887 arena: self.arena,
2888 ..*self
2889 }
2890 }
2891
2892 pub unsafe fn get_root_unchecked<'b>(location: *const u8) -> PointerReader<'b> {
2893 PointerReader {
2894 arena: &NULL_ARENA,
2895 segment_id: 0,
2896 cap_table: Default::default(),
2897 pointer: location as *const _,
2898 nesting_limit: 0x7fffffff,
2899 }
2900 }
2901
2902 pub fn imbue(&mut self, cap_table: CapTableReader) {
2903 self.cap_table = cap_table;
2904 }
2905
2906 #[inline]
2907 pub fn is_null(&self) -> bool {
2908 self.pointer.is_null() || unsafe { (*self.pointer).is_null() }
2909 }
2910
2911 pub fn total_size(&self) -> Result<MessageSize> {
2912 if self.pointer.is_null() {
2913 Ok(MessageSize {
2914 word_count: 0,
2915 cap_count: 0,
2916 })
2917 } else {
2918 unsafe {
2919 wire_helpers::total_size(
2920 self.arena,
2921 self.segment_id,
2922 self.pointer,
2923 self.nesting_limit,
2924 )
2925 }
2926 }
2927 }
2928
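    /// Reads this pointer as a struct, falling back to `default` (a struct
    /// encoded as raw words) when the pointer is null. A hedged sketch of
    /// field access on the result; the field offsets are hypothetical and the
    /// message setup is elided (hence `ignore`):
    ///
    /// ```ignore
    /// let st = reader.get_struct(None)?;
    /// let enabled = st.get_bool_field(0);
    /// let id = st.get_data_field::<u64>(1);
    /// let name = st.get_pointer_field(0).get_text(None)?;
    /// ```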
2929 pub fn get_struct(self, default: Option<&'a [crate::Word]>) -> Result<StructReader<'a>> {
2930 let reff: *const WirePointer = if self.pointer.is_null() {
2931 zero_pointer()
2932 } else {
2933 self.pointer
2934 };
2935 unsafe {
2936 wire_helpers::read_struct_pointer(
2937 self.arena,
2938 self.segment_id,
2939 self.cap_table,
2940 reff,
2941 default,
2942 self.nesting_limit,
2943 )
2944 }
2945 }
2946
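    /// Reads this pointer as a list, validating the wire encoding against the
    /// expected element size. A hedged sketch for a list assumed to hold
    /// `u32` values (`ignore`d because message setup is elided):
    ///
    /// ```ignore
    /// let list = reader.get_list(ElementSize::FourBytes, None)?;
    /// for i in 0..list.len() {
    ///     let value: u32 = PrimitiveElement::get(&list, i);
    /// }
    /// ```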
2947 pub fn get_list(
2948 self,
2949 expected_element_size: ElementSize,
2950 default: Option<&'a [crate::Word]>,
2951 ) -> Result<ListReader<'a>> {
2952 let default_value: *const u8 = match default {
2953 None => core::ptr::null(),
2954 Some(d) => d.as_ptr() as *const u8,
2955 };
2956 let reff = if self.pointer.is_null() {
2957 zero_pointer()
2958 } else {
2959 self.pointer
2960 };
2961 unsafe {
2962 wire_helpers::read_list_pointer(
2963 self.arena,
2964 self.segment_id,
2965 self.cap_table,
2966 reff,
2967 default_value,
2968 Some(expected_element_size),
2969 self.nesting_limit,
2970 )
2971 }
2972 }
2973
2974 fn get_list_any_size(self, default_value: *const u8) -> Result<ListReader<'a>> {
2975 let reff = if self.pointer.is_null() {
2976 zero_pointer()
2977 } else {
2978 self.pointer
2979 };
2980 unsafe {
2981 wire_helpers::read_list_pointer(
2982 self.arena,
2983 self.segment_id,
2984 self.cap_table,
2985 reff,
2986 default_value,
2987 None,
2988 self.nesting_limit,
2989 )
2990 }
2991 }
2992
2993 pub fn get_text(self, default: Option<&[crate::Word]>) -> Result<text::Reader<'a>> {
2994 let reff = if self.pointer.is_null() {
2995 zero_pointer()
2996 } else {
2997 self.pointer
2998 };
2999 unsafe { wire_helpers::read_text_pointer(self.arena, self.segment_id, reff, default) }
3000 }
3001
3002 pub fn get_data(&self, default: Option<&'a [crate::Word]>) -> Result<data::Reader<'a>> {
3003 let reff = if self.pointer.is_null() {
3004 zero_pointer()
3005 } else {
3006 self.pointer
3007 };
3008 unsafe { wire_helpers::read_data_pointer(self.arena, self.segment_id, reff, default) }
3009 }
3010
3011 #[cfg(feature = "alloc")]
3012 pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
3013 let reff: *const WirePointer = if self.pointer.is_null() {
3014 zero_pointer()
3015 } else {
3016 self.pointer
3017 };
3018 unsafe {
3019 wire_helpers::read_capability_pointer(
3020 self.arena,
3021 self.segment_id,
3022 self.cap_table,
3023 reff,
3024 self.nesting_limit,
3025 )
3026 }
3027 }
3028
3029 pub fn get_pointer_type(&self) -> Result<PointerType> {
3030 if self.is_null() {
3031 Ok(PointerType::Null)
3032 } else {
3033 let (_, reff, _) =
3034 unsafe { wire_helpers::follow_fars(self.arena, self.pointer, self.segment_id)? };
3035
3036 match unsafe { (*reff).kind() } {
3037 WirePointerKind::Far => Err(Error::from_kind(ErrorKind::UnexpectedFarPointer)),
3038 WirePointerKind::Struct => Ok(PointerType::Struct),
3039 WirePointerKind::List => Ok(PointerType::List),
3040 WirePointerKind::Other => {
3041 if unsafe { (*reff).is_capability() } {
3042 Ok(PointerType::Capability)
3043 } else {
3044 Err(Error::from_kind(ErrorKind::UnknownPointerType))
3045 }
3046 }
3047 }
3048 }
3049 }
3050
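    /// Returns whether the pointed-to object is encoded canonically: laid out
    /// in preorder starting exactly at `read_head`, with no far pointers and
    /// with trailing zero data words and null pointers truncated away.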
3051 pub fn is_canonical(&self, read_head: &Cell<*const u8>) -> Result<bool> {
3052 if self.pointer.is_null() || unsafe { !(*self.pointer).is_positional() } {
3053 return Ok(false);
3054 }
3055
3056 match self.get_pointer_type()? {
3057 PointerType::Null => Ok(true),
3058 PointerType::Struct => {
3059 let mut data_trunc = false;
3060 let mut ptr_trunc = false;
3061 let st = self.get_struct(None)?;
3062 if st.get_data_section_size() == 0 && st.get_pointer_section_size() == 0 {
3063 Ok(self.pointer as *const _ == st.get_location())
3064 } else {
3065 let result =
3066 st.is_canonical(read_head, read_head, &mut data_trunc, &mut ptr_trunc)?;
3067 Ok(result && data_trunc && ptr_trunc)
3068 }
3069 }
3070 PointerType::List => unsafe {
3071 self.get_list_any_size(ptr::null())?
3072 .is_canonical(read_head, self.pointer)
3073 },
3074 PointerType::Capability => Ok(false),
3075 }
3076 }
3077}
3078
3079pub struct PointerBuilder<'a> {
3080 arena: &'a mut dyn BuilderArena,
3081 segment_id: u32,
3082 cap_table: CapTableBuilder,
3083 pointer: *mut WirePointer,
3084}
3085
3086impl<'a> PointerBuilder<'a> {
3087 #[inline]
3088 pub fn get_root(arena: &'a mut dyn BuilderArena, segment_id: u32, location: *mut u8) -> Self {
3089 PointerBuilder {
3090 arena,
3091 cap_table: Default::default(),
3092 segment_id,
3093 pointer: location as *mut _,
3094 }
3095 }
3096
3097 #[inline]
3098 pub fn reborrow(&mut self) -> PointerBuilder<'_> {
3099 PointerBuilder {
3100 arena: self.arena,
3101 ..*self
3102 }
3103 }
3104
3105 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
3106 self.cap_table = cap_table;
3107 }
3108
3109 #[inline]
3110 pub fn is_null(&self) -> bool {
3111 unsafe { (*self.pointer).is_null() }
3112 }
3113
3114 pub fn get_struct(
3115 self,
3116 size: StructSize,
3117 default: Option<&'a [crate::Word]>,
3118 ) -> Result<StructBuilder<'a>> {
3119 unsafe {
3120 wire_helpers::get_writable_struct_pointer(
3121 self.arena,
3122 self.pointer,
3123 self.segment_id,
3124 self.cap_table,
3125 size,
3126 default,
3127 )
3128 }
3129 }
3130
3131 pub fn get_list(
3132 self,
3133 element_size: ElementSize,
3134 default: Option<&'a [crate::Word]>,
3135 ) -> Result<ListBuilder<'a>> {
3136 let default_value: *const u8 = match default {
3137 None => core::ptr::null(),
3138 Some(d) => d.as_ptr() as *const u8,
3139 };
3140 unsafe {
3141 wire_helpers::get_writable_list_pointer(
3142 self.arena,
3143 self.pointer,
3144 self.segment_id,
3145 self.cap_table,
3146 element_size,
3147 default_value,
3148 )
3149 }
3150 }
3151
3152 pub fn get_struct_list(
3153 self,
3154 element_size: StructSize,
3155 default: Option<&'a [crate::Word]>,
3156 ) -> Result<ListBuilder<'a>> {
3157 let default_value: *const u8 = match default {
3158 None => core::ptr::null(),
3159 Some(d) => d.as_ptr() as *const u8,
3160 };
3161 unsafe {
3162 wire_helpers::get_writable_struct_list_pointer(
3163 self.arena,
3164 self.pointer,
3165 self.segment_id,
3166 self.cap_table,
3167 element_size,
3168 default_value,
3169 )
3170 }
3171 }
3172
3173 pub fn get_text(self, default: Option<&'a [crate::Word]>) -> Result<text::Builder<'a>> {
3174 unsafe {
3175 wire_helpers::get_writable_text_pointer(
3176 self.arena,
3177 self.pointer,
3178 self.segment_id,
3179 default,
3180 )
3181 }
3182 }
3183
3184 pub fn get_data(self, default: Option<&'a [crate::Word]>) -> Result<data::Builder<'a>> {
3185 unsafe {
3186 wire_helpers::get_writable_data_pointer(
3187 self.arena,
3188 self.pointer,
3189 self.segment_id,
3190 default,
3191 )
3192 }
3193 }
3194
3195 #[cfg(feature = "alloc")]
3196 pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
3197 unsafe {
3198 wire_helpers::read_capability_pointer(
3199 self.arena.as_reader(),
3200 self.segment_id,
3201 self.cap_table.into_reader(),
3202 self.pointer,
3203 i32::MAX,
3204 )
3205 }
3206 }
3207
3208 pub fn init_struct(self, size: StructSize) -> StructBuilder<'a> {
3209 unsafe {
3210 wire_helpers::init_struct_pointer(
3211 self.arena,
3212 self.pointer,
3213 self.segment_id,
3214 self.cap_table,
3215 size,
3216 )
3217 }
3218 }
3219
3220 pub fn init_list(
3221 self,
3222 element_size: ElementSize,
3223 element_count: ElementCount32,
3224 ) -> ListBuilder<'a> {
3225 unsafe {
3226 wire_helpers::init_list_pointer(
3227 self.arena,
3228 self.pointer,
3229 self.segment_id,
3230 self.cap_table,
3231 element_count,
3232 element_size,
3233 )
3234 }
3235 }
3236
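    /// Allocates an inline-composite list of `element_count` structs, each
    /// laid out according to `element_size`. A hedged sketch; the two-word,
    /// one-pointer layout is hypothetical and the arena setup is elided
    /// (hence `ignore`):
    ///
    /// ```ignore
    /// let layout = StructSize { data: 2, pointers: 1 };
    /// let mut list = builder.init_struct_list(16, layout);
    /// let first = list.reborrow().get_struct_element(0);
    /// first.set_data_field::<u32>(0, 42);
    /// ```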
3237 pub fn init_struct_list(
3238 self,
3239 element_count: ElementCount32,
3240 element_size: StructSize,
3241 ) -> ListBuilder<'a> {
3242 unsafe {
3243 wire_helpers::init_struct_list_pointer(
3244 self.arena,
3245 self.pointer,
3246 self.segment_id,
3247 self.cap_table,
3248 element_count,
3249 element_size,
3250 )
3251 }
3252 }
3253
3254 pub fn init_text(self, size: ByteCount32) -> text::Builder<'a> {
3255 unsafe {
3256 wire_helpers::init_text_pointer(self.arena, self.pointer, self.segment_id, size).value
3257 }
3258 }
3259
3260 pub fn init_data(self, size: ByteCount32) -> data::Builder<'a> {
3261 unsafe {
3262 wire_helpers::init_data_pointer(self.arena, self.pointer, self.segment_id, size).value
3263 }
3264 }
3265
3266 pub fn set_struct(&mut self, value: &StructReader, canonicalize: bool) -> Result<()> {
3267 unsafe {
3268 wire_helpers::set_struct_pointer(
3269 self.arena,
3270 self.segment_id,
3271 self.cap_table,
3272 self.pointer,
3273 *value,
3274 canonicalize,
3275 )?;
3276 Ok(())
3277 }
3278 }
3279
3280 pub fn set_list(&mut self, value: &ListReader, canonicalize: bool) -> Result<()> {
3281 unsafe {
3282 wire_helpers::set_list_pointer(
3283 self.arena,
3284 self.segment_id,
3285 self.cap_table,
3286 self.pointer,
3287 *value,
3288 canonicalize,
3289 )?;
3290 Ok(())
3291 }
3292 }
3293
3294 pub fn set_text(&mut self, value: crate::text::Reader<'_>) {
3295 unsafe {
3296 wire_helpers::set_text_pointer(self.arena, self.pointer, self.segment_id, value);
3297 }
3298 }
3299
3300 pub fn set_data(&mut self, value: &[u8]) {
3301 unsafe {
3302 wire_helpers::set_data_pointer(self.arena, self.pointer, self.segment_id, value);
3303 }
3304 }
3305
3306 #[cfg(feature = "alloc")]
3307 pub fn set_capability(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) {
3308 unsafe {
3309 wire_helpers::set_capability_pointer(
3310 self.arena,
3311 self.segment_id,
3312 self.cap_table,
3313 self.pointer,
3314 cap,
3315 );
3316 }
3317 }
3318
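    /// Replaces this pointer's target with a deep copy of `other`, zeroing out
    /// the previous target first; a null `other` simply clears this pointer.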
3319 pub fn copy_from(&mut self, other: PointerReader, canonicalize: bool) -> Result<()> {
3320 if other.pointer.is_null() {
3321 if !self.pointer.is_null() {
3322 unsafe {
3323 wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
3324 *self.pointer = mem::zeroed();
3325 }
3326 }
3327 } else {
3328 unsafe {
3329 wire_helpers::copy_pointer(
3330 self.arena,
3331 self.segment_id,
3332 self.cap_table,
3333 self.pointer,
3334 other.arena,
3335 other.segment_id,
3336 other.cap_table,
3337 other.pointer,
3338 other.nesting_limit,
3339 canonicalize,
3340 )?;
3341 }
3342 }
3343 Ok(())
3344 }
3345
3346 pub fn clear(&mut self) {
3347 unsafe {
3348 wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
3349 ptr::write_bytes(self.pointer, 0, 1);
3350 }
3351 }
3352
3353 pub fn as_reader(&self) -> PointerReader<'_> {
3354 PointerReader {
3355 arena: self.arena.as_reader(),
3356 segment_id: self.segment_id,
3357 cap_table: self.cap_table.into_reader(),
3358 pointer: self.pointer,
3359 nesting_limit: 0x7fffffff,
3360 }
3361 }
3362
3363 pub fn into_reader(self) -> PointerReader<'a> {
3364 PointerReader {
3365 arena: self.arena.as_reader(),
3366 segment_id: self.segment_id,
3367 cap_table: self.cap_table.into_reader(),
3368 pointer: self.pointer,
3369 nesting_limit: 0x7fffffff,
3370 }
3371 }
3372}
3373
3374#[derive(Clone, Copy)]
3375pub struct StructReader<'a> {
3376 arena: &'a dyn ReaderArena,
3377 cap_table: CapTableReader,
3378 data: *const u8,
3379 pointers: *const WirePointer,
3380 segment_id: u32,
3381 data_size: BitCount32,
3382 pointer_count: WirePointerCount16,
3383 nesting_limit: i32,
3384}
3385
3386impl<'a> StructReader<'a> {
3387 pub fn new_default<'b>() -> StructReader<'b> {
3388 StructReader {
3389 arena: &NULL_ARENA,
3390 segment_id: 0,
3391 cap_table: Default::default(),
3392 data: ptr::null(),
3393 pointers: ptr::null(),
3394 data_size: 0,
3395 pointer_count: 0,
3396 nesting_limit: 0x7fffffff,
3397 }
3398 }
3399
3400 pub fn imbue(&mut self, cap_table: CapTableReader) {
3401 self.cap_table = cap_table
3402 }
3403
3404 pub fn get_data_section_size(&self) -> BitCount32 {
3405 self.data_size
3406 }
3407
3408 pub fn get_pointer_section_size(&self) -> WirePointerCount16 {
3409 self.pointer_count
3410 }
3411
3412 pub fn get_pointer_section_as_list(&self) -> ListReader<'a> {
3413 ListReader {
3414 arena: self.arena,
3415 segment_id: self.segment_id,
3416 cap_table: self.cap_table,
3417 ptr: self.pointers as *const _,
3418 element_count: u32::from(self.pointer_count),
3419 element_size: ElementSize::Pointer,
3420 step: BITS_PER_WORD as BitCount32,
3421 struct_data_size: 0,
3422 struct_pointer_count: 0,
3423 nesting_limit: self.nesting_limit,
3424 }
3425 }
3426
3427 pub fn get_data_section_as_blob(&self) -> &'a [u8] {
3428 if self.data_size == 0 {
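            // Handle the empty case explicitly to avoid forming a slice from a
            // null pointer, which would be undefined behavior.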
3429 &[]
3432 } else {
3433 unsafe {
3434 ::core::slice::from_raw_parts(self.data, self.data_size as usize / BITS_PER_BYTE)
3435 }
3436 }
3437 }
3438
3439 #[inline]
3440 pub fn get_data_field<T: Primitive + zero::Zero>(&self, offset: ElementCount) -> T {
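        // The field may lie beyond `data_size` if this struct was written by
        // an older schema version; such fields read as the default (zero).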
3441 if (offset + 1) * bits_per_element::<T>() <= self.data_size as usize {
3445 let dwv: *const <T as Primitive>::Raw = self.data as *const _;
3446 unsafe { <T as Primitive>::get(&*dwv.add(offset)) }
3447 } else {
3448 T::zero()
3449 }
3450 }
3451
3452 #[inline]
3453 pub fn get_bool_field(&self, offset: ElementCount) -> bool {
3454 let boffset: BitCount32 = offset as BitCount32;
3455 if boffset < self.data_size {
3456 unsafe {
3457 let b: *const u8 = self.data.add(boffset as usize / BITS_PER_BYTE);
3458 ((*b) & (1u8 << (boffset % BITS_PER_BYTE as u32) as usize)) != 0
3459 }
3460 } else {
3461 false
3462 }
3463 }
3464
3465 #[inline]
3466 pub fn get_data_field_mask<T: Primitive + zero::Zero + Mask>(
3467 &self,
3468 offset: ElementCount,
3469 mask: <T as Mask>::T,
3470 ) -> T {
3471 Mask::mask(self.get_data_field(offset), mask)
3472 }
3473
3474 #[inline]
3475 pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
3476 self.get_bool_field(offset) ^ mask
3477 }
3478
3479 #[inline]
3480 pub fn get_pointer_field(&self, ptr_index: WirePointerCount) -> PointerReader<'a> {
3481 if ptr_index < self.pointer_count as WirePointerCount {
3482 PointerReader {
3483 arena: self.arena,
3484 segment_id: self.segment_id,
3485 cap_table: self.cap_table,
3486 pointer: unsafe { self.pointers.add(ptr_index) },
3487 nesting_limit: self.nesting_limit,
3488 }
3489 } else {
3490 PointerReader::new_default()
3491 }
3492 }
3493
3494 #[inline]
3495 pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
3496 self.get_pointer_field(ptr_index).is_null()
3497 }
3498
3499 pub fn total_size(&self) -> Result<MessageSize> {
3500 let mut result = MessageSize {
3501 word_count: u64::from(wire_helpers::round_bits_up_to_words(u64::from(
3502 self.data_size,
3503 ))) + u64::from(self.pointer_count) * WORDS_PER_POINTER as u64,
3504 cap_count: 0,
3505 };
3506
3507 for i in 0..self.pointer_count as isize {
3508 unsafe {
3509 result += wire_helpers::total_size(
3510 self.arena,
3511 self.segment_id,
3512 self.pointers.offset(i),
3513 self.nesting_limit,
3514 )?;
3515 }
3516 }
3517
3518 Ok(result)
3521 }
3522
3523 fn get_location(&self) -> *const u8 {
3524 self.data
3525 }
3526
3527 pub fn is_canonical(
3528 &self,
3529 read_head: &Cell<*const u8>,
3530 ptr_head: &Cell<*const u8>,
3531 data_trunc: &mut bool,
3532 ptr_trunc: &mut bool,
3533 ) -> Result<bool> {
3534 if self.get_location() != read_head.get() {
3535 return Ok(false);
3536 }
3537
3538 if self.get_data_section_size() % BITS_PER_WORD as u32 != 0 {
3539 return Ok(false);
3541 }
3542
3543 let data_size = self.get_data_section_size() / BITS_PER_WORD as u32;
3544
3545 if data_size != 0 {
3547 *data_trunc = self.get_data_field::<u64>((data_size - 1) as usize) != 0;
3548 } else {
3549 *data_trunc = true;
3550 }
3551
3552 if self.pointer_count != 0 {
3553 *ptr_trunc = !self
3554 .get_pointer_field(self.pointer_count as usize - 1)
3555 .is_null();
3556 } else {
3557 *ptr_trunc = true;
3558 }
3559
3560 read_head.set(unsafe {
3561 (read_head.get()).offset(
3562 (data_size as isize + self.pointer_count as isize) * (BYTES_PER_WORD as isize),
3563 )
3564 });
3565
3566 for ptr_idx in 0..self.pointer_count {
3567 if !self
3568 .get_pointer_field(ptr_idx as usize)
3569 .is_canonical(ptr_head)?
3570 {
3571 return Ok(false);
3572 }
3573 }
3574
3575 Ok(true)
3576 }
3577}
3578
3579pub struct StructBuilder<'a> {
3580 arena: &'a mut dyn BuilderArena,
3581 cap_table: CapTableBuilder,
3582 data: *mut u8,
3583 pointers: *mut WirePointer,
3584 segment_id: u32,
3585 data_size: BitCount32,
3586 pointer_count: WirePointerCount16,
3587}
3588
3589impl<'a> StructBuilder<'a> {
3590 #[inline]
3591 pub fn reborrow(&mut self) -> StructBuilder<'_> {
3592 StructBuilder {
3593 arena: self.arena,
3594 ..*self
3595 }
3596 }
3597
3598 pub fn as_reader(&self) -> StructReader<'_> {
3599 StructReader {
3600 arena: self.arena.as_reader(),
3601 cap_table: self.cap_table.into_reader(),
3602 data: self.data,
3603 pointers: self.pointers,
3604 pointer_count: self.pointer_count,
3605 segment_id: self.segment_id,
3606 data_size: self.data_size,
3607 nesting_limit: 0x7fffffff,
3608 }
3609 }
3610
3611 pub fn into_reader(self) -> StructReader<'a> {
3612 StructReader {
3613 arena: self.arena.as_reader(),
3614 cap_table: self.cap_table.into_reader(),
3615 data: self.data,
3616 pointers: self.pointers,
3617 pointer_count: self.pointer_count,
3618 segment_id: self.segment_id,
3619 data_size: self.data_size,
3620 nesting_limit: 0x7fffffff,
3621 }
3622 }
3623
3624 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
3625 self.cap_table = cap_table
3626 }
3627
3628 #[inline]
3629 pub fn set_data_field<T: Primitive>(&self, offset: ElementCount, value: T) {
3630 let ptr: *mut <T as Primitive>::Raw = self.data as *mut _;
3631 unsafe { <T as Primitive>::set(&mut *ptr.add(offset), value) }
3632 }
3633
3634 #[inline]
3635 pub fn set_data_field_mask<T: Primitive + Mask>(
3636 &self,
3637 offset: ElementCount,
3638 value: T,
3639 mask: <T as Mask>::T,
3640 ) {
3641 self.set_data_field(offset, Mask::mask(value, mask));
3642 }
3643
3644 #[inline]
3645 pub fn get_data_field<T: Primitive>(&self, offset: ElementCount) -> T {
3646 let ptr: *const <T as Primitive>::Raw = self.data as *const _;
3647 unsafe { <T as Primitive>::get(&*ptr.add(offset)) }
3648 }
3649
3650 #[inline]
3651 pub fn get_data_field_mask<T: Primitive + Mask>(
3652 &self,
3653 offset: ElementCount,
3654 mask: <T as Mask>::T,
3655 ) -> T {
3656 Mask::mask(self.get_data_field(offset), mask)
3657 }
3658
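    /// Bit fields are addressed in bits from the start of the data section:
    /// offset 19, for example, lands in byte `19 / 8 = 2` at bit `19 % 8 = 3`.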
3659 #[inline]
3660 pub fn set_bool_field(&self, offset: ElementCount, value: bool) {
3661 let boffset: BitCount0 = offset;
3664 let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
3665 let bitnum = boffset % BITS_PER_BYTE;
3666 unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
3667 }
3668
3669 #[inline]
3670 pub fn set_bool_field_mask(&self, offset: ElementCount, value: bool, mask: bool) {
3671 self.set_bool_field(offset, value ^ mask);
3672 }
3673
3674 #[inline]
3675 pub fn get_bool_field(&self, offset: ElementCount) -> bool {
3676 let boffset: BitCount0 = offset;
3677 let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
3678 unsafe { ((*b) & (1 << (boffset % BITS_PER_BYTE))) != 0 }
3679 }
3680
3681 #[inline]
3682 pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
3683 self.get_bool_field(offset) ^ mask
3684 }
3685
3686 #[inline]
3687 pub fn get_pointer_field(self, ptr_index: WirePointerCount) -> PointerBuilder<'a> {
3688 PointerBuilder {
3689 arena: self.arena,
3690 segment_id: self.segment_id,
3691 cap_table: self.cap_table,
3692 pointer: unsafe { self.pointers.add(ptr_index) },
3693 }
3694 }
3695
3696 #[inline]
3697 pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
3698 unsafe { (*self.pointers.add(ptr_index)).is_null() }
3699 }
3700
3701 pub fn copy_content_from(&mut self, other: &StructReader) -> Result<()> {
3702 use core::cmp::min;
3703 let shared_data_size = min(self.data_size, other.data_size);
3705 let shared_pointer_count = min(self.pointer_count, other.pointer_count);
3706
        if (shared_data_size > 0 && other.data == self.data)
            || (shared_pointer_count > 0 && other.pointers == self.pointers)
        {
            // At least one of the section pointers points into ourself. Verify that
            // the other one does too (ignoring empty sections); otherwise the struct
            // is in an impossible, partially self-aliased state.
            if !((shared_data_size == 0 || other.data == self.data)
                && (shared_pointer_count == 0 || other.pointers == self.pointers))
            {
                return Err(Error::from_kind(
                    ErrorKind::OnlyOneOfTheSectionPointersIsPointingToOurself,
                ));
            }

            // `other` is a reader for this same struct, so there is nothing to copy.
            return Ok(());
        }
3723
3724 unsafe {
3725 if self.data_size > shared_data_size {
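                // The target is wider than the source, so zero out the extra bits
                // the source does not cover (a data_size of 1 denotes a lone bit).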
3726 if self.data_size == 1 {
3729 self.set_bool_field(0, false);
3730 } else {
3731 let unshared = self
3732 .data
3733 .offset((shared_data_size / BITS_PER_BYTE as u32) as isize);
3734 ptr::write_bytes(
3735 unshared,
3736 0,
3737 ((self.data_size - shared_data_size) / BITS_PER_BYTE as u32) as usize,
3738 );
3739 }
3740 }
3741
3742 if shared_data_size == 1 {
3744 self.set_bool_field(0, other.get_bool_field(0));
3745 } else {
3746 wire_helpers::copy_nonoverlapping_check_zero(
3747 other.data,
3748 self.data,
3749 (shared_data_size / BITS_PER_BYTE as u32) as usize,
3750 );
3751 }
3752
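            // Tear down any objects the target's pointers currently reference and
            // zero the pointer section before copying the shared pointers over.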
3753 for i in 0..self.pointer_count as isize {
3755 wire_helpers::zero_object(
3756 self.arena,
3757 self.segment_id,
3758 self.pointers.offset(i) as *mut _,
3759 );
3760 }
3761 ptr::write_bytes(self.pointers, 0u8, self.pointer_count as usize);
3762
3763 for i in 0..shared_pointer_count as isize {
3764 wire_helpers::copy_pointer(
3765 self.arena,
3766 self.segment_id,
3767 self.cap_table,
3768 self.pointers.offset(i),
3769 other.arena,
3770 other.segment_id,
3771 other.cap_table,
3772 other.pointers.offset(i),
3773 other.nesting_limit,
3774 false,
3775 )?;
3776 }
3777 }
3778
3779 Ok(())
3780 }
3781}
3782
3783#[derive(Clone, Copy)]
3784pub struct ListReader<'a> {
3785 arena: &'a dyn ReaderArena,
3786 cap_table: CapTableReader,
3787 ptr: *const u8,
3788 segment_id: u32,
3789 element_count: ElementCount32,
3790 step: BitCount32,
3791 struct_data_size: BitCount32,
3792 nesting_limit: i32,
3793 struct_pointer_count: WirePointerCount16,
3794 element_size: ElementSize,
3795}
3796
3797impl<'a> ListReader<'a> {
3798 pub fn new_default<'b>() -> ListReader<'b> {
3799 ListReader {
3800 arena: &NULL_ARENA,
3801 segment_id: 0,
3802 cap_table: Default::default(),
3803 ptr: ptr::null(),
3804 element_count: 0,
3805 element_size: ElementSize::Void,
3806 step: 0,
3807 struct_data_size: 0,
3808 struct_pointer_count: 0,
3809 nesting_limit: 0x7fffffff,
3810 }
3811 }
3812
3813 pub fn imbue(&mut self, cap_table: CapTableReader) {
3814 self.cap_table = cap_table
3815 }
3816
3817 #[inline]
3818 pub fn len(&self) -> ElementCount32 {
3819 self.element_count
3820 }
3821
3822 pub fn is_empty(&self) -> bool {
3823 self.len() == 0
3824 }
3825
3826 pub(crate) fn get_step_size_in_bits(&self) -> u32 {
3827 self.step
3828 }
3829
3830 pub(crate) fn get_element_size(&self) -> ElementSize {
3831 self.element_size
3832 }
3833
3834 pub(crate) fn into_raw_bytes(self) -> &'a [u8] {
3835 if self.element_count == 0 {
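            // Handle the empty case explicitly to avoid forming a slice from a
            // null pointer, which would be undefined behavior.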
3836 &[]
3839 } else {
3840 let num_bytes = wire_helpers::round_bits_up_to_bytes(
3841 u64::from(self.step) * u64::from(self.element_count),
3842 ) as usize;
3843 unsafe { ::core::slice::from_raw_parts(self.ptr, num_bytes) }
3844 }
3845 }
3846
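    /// Computes the element's position from `step`, the per-element stride in
    /// bits. For example, a struct of two data words and one pointer has a step
    /// of 192 bits, so element 3 starts at byte `3 * 192 / 8 = 72`.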
3847 #[inline]
3848 pub fn get_struct_element(&self, index: ElementCount32) -> StructReader<'a> {
3849 let index_byte: ByteCount32 =
3850 ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;
3851
3852 let struct_data: *const u8 = unsafe { self.ptr.offset(index_byte as isize) };
3853
3854 let struct_pointers: *const WirePointer =
3855 unsafe { struct_data.add(self.struct_data_size as usize / BITS_PER_BYTE) as *const _ };
3856
3857 StructReader {
3858 arena: self.arena,
3859 segment_id: self.segment_id,
3860 cap_table: self.cap_table,
3861 data: struct_data,
3862 pointers: struct_pointers,
3863 data_size: self.struct_data_size,
3864 pointer_count: self.struct_pointer_count,
3865 nesting_limit: self.nesting_limit - 1,
3866 }
3867 }
3868
3869 #[inline]
3870 pub fn get_pointer_element(self, index: ElementCount32) -> PointerReader<'a> {
3871 let offset = (self.struct_data_size as u64 / BITS_PER_BYTE as u64
3872 + u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64)
3873 as isize;
3874 PointerReader {
3875 arena: self.arena,
3876 segment_id: self.segment_id,
3877 cap_table: self.cap_table,
3878 pointer: unsafe { self.ptr.offset(offset) } as *const _,
3879 nesting_limit: self.nesting_limit,
3880 }
3881 }
3882
3883 pub unsafe fn is_canonical(
3884 &self,
3885 read_head: &Cell<*const u8>,
3886 reff: *const WirePointer,
3887 ) -> Result<bool> {
3888 match self.element_size {
3889 ElementSize::InlineComposite => {
                read_head.set(unsafe { read_head.get().add(BYTES_PER_WORD) }); // skip the tag word
                if !core::ptr::eq(self.ptr, read_head.get()) {
3892 return Ok(false);
3893 }
3894 if self.struct_data_size % BITS_PER_WORD as u32 != 0 {
3895 return Ok(false);
3896 }
3897 let struct_size = (self.struct_data_size / BITS_PER_WORD as u32)
3898 + u32::from(self.struct_pointer_count);
3899 let word_count = unsafe { (*reff).list_inline_composite_word_count() };
3900 if struct_size * self.element_count != word_count {
3901 return Ok(false);
3902 }
3903 if struct_size == 0 {
3904 return Ok(true);
3905 }
3906 let list_end = unsafe {
3907 read_head
3908 .get()
3909 .add((self.element_count * struct_size) as usize * BYTES_PER_WORD)
3910 };
3911 let pointer_head = Cell::new(list_end);
3912 let mut list_data_trunc = false;
3913 let mut list_ptr_trunc = false;
3914 for idx in 0..self.element_count {
3915 let mut data_trunc = false;
3916 let mut ptr_trunc = false;
3917 if !self.get_struct_element(idx).is_canonical(
3918 read_head,
3919 &pointer_head,
3920 &mut data_trunc,
3921 &mut ptr_trunc,
3922 )? {
3923 return Ok(false);
3924 }
3925 list_data_trunc |= data_trunc;
3926 list_ptr_trunc |= ptr_trunc;
3927 }
3928 assert_eq!(read_head.get(), list_end);
3929 read_head.set(pointer_head.get());
3930 Ok(list_data_trunc && list_ptr_trunc)
3931 }
3932 ElementSize::Pointer => {
3933 if !core::ptr::eq(self.ptr, read_head.get()) {
3934 return Ok(false);
3935 }
3936 read_head.set(unsafe {
3937 read_head
3938 .get()
3939 .offset(self.element_count as isize * BYTES_PER_WORD as isize)
3940 });
3941 for idx in 0..self.element_count {
3942 if !self.get_pointer_element(idx).is_canonical(read_head)? {
3943 return Ok(false);
3944 }
3945 }
3946 Ok(true)
3947 }
3948 element_size => {
3949 if !core::ptr::eq(self.ptr, read_head.get()) {
3950 return Ok(false);
3951 }
3952 let bit_size =
3953 u64::from(self.element_count) * u64::from(data_bits_per_element(element_size));
3954 let mut word_size = bit_size / BITS_PER_WORD as u64;
3955 if bit_size % BITS_PER_WORD as u64 != 0 {
3956 word_size += 1
3957 }
3958
3959 let byte_size = bit_size / BITS_PER_BYTE as u64;
3960 let mut byte_read_head: *const u8 = read_head.get();
3961 byte_read_head = unsafe { byte_read_head.offset(byte_size as isize) };
3962 let read_head_end = unsafe {
3963 read_head
3964 .get()
3965 .offset(word_size as isize * BYTES_PER_WORD as isize)
3966 };
3967
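                // In canonical form, the padding bits in a final partial byte and
                // any padding bytes up to the next word boundary must all be zero.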
3968 let leftover_bits = bit_size % BITS_PER_BYTE as u64;
3969 if leftover_bits > 0 {
3970 let mask: u8 = !((1 << leftover_bits as u8) - 1);
3971 let partial_byte = unsafe { *byte_read_head };
3972
3973 if partial_byte & mask != 0 {
3974 return Ok(false);
3975 }
3976 byte_read_head = unsafe { byte_read_head.offset(1_isize) };
3977 }
3978
3979 while byte_read_head != read_head_end {
3980 if unsafe { *byte_read_head } != 0 {
3981 return Ok(false);
3982 }
3983 byte_read_head = unsafe { byte_read_head.offset(1_isize) };
3984 }
3985
3986 read_head.set(read_head_end);
3987 Ok(true)
3988 }
3989 }
3990 }
3991}
3992
3993pub struct ListBuilder<'a> {
3994 arena: &'a mut dyn BuilderArena,
3995 cap_table: CapTableBuilder,
3996 ptr: *mut u8,
3997 segment_id: u32,
3998 element_count: ElementCount32,
3999 step: BitCount32,
4000 struct_data_size: BitCount32,
4001 struct_pointer_count: WirePointerCount16,
4002 element_size: ElementSize,
4003}
4004
4005impl<'a> ListBuilder<'a> {
4006 #[inline]
4007 pub fn new_default(arena: &mut dyn BuilderArena) -> ListBuilder<'_> {
4008 ListBuilder {
4009 arena,
4010 segment_id: 0,
4011 cap_table: Default::default(),
4012 ptr: ptr::null_mut(),
4013 element_count: 0,
4014 element_size: ElementSize::Void,
4015 step: 0,
4016 struct_data_size: 0,
4017 struct_pointer_count: 0,
4018 }
4019 }
4020
4021 pub fn into_reader(self) -> ListReader<'a> {
4022 ListReader {
4023 arena: self.arena.as_reader(),
4024 segment_id: self.segment_id,
4025 cap_table: self.cap_table.into_reader(),
4026 ptr: self.ptr as *const _,
4027 element_count: self.element_count,
4028 element_size: self.element_size,
4029 step: self.step,
4030 struct_data_size: self.struct_data_size,
4031 struct_pointer_count: self.struct_pointer_count,
4032 nesting_limit: 0x7fffffff,
4033 }
4034 }
4035
4036 #[inline]
4037 pub fn reborrow(&mut self) -> ListBuilder<'_> {
4038 ListBuilder {
4039 arena: self.arena,
4040 ..*self
4041 }
4042 }
4043
4044 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
4045 self.cap_table = cap_table
4046 }
4047
4048 #[inline]
4049 pub fn len(&self) -> ElementCount32 {
4050 self.element_count
4051 }
4052
4053 pub fn is_empty(&self) -> bool {
4054 self.len() == 0
4055 }
4056
4057 #[inline]
4058 pub fn get_struct_element(self, index: ElementCount32) -> StructBuilder<'a> {
4059 let index_byte = ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;
4060 let struct_data = unsafe { self.ptr.offset(index_byte as isize) };
4061 let struct_pointers =
4062 unsafe { struct_data.add((self.struct_data_size as usize) / BITS_PER_BYTE) as *mut _ };
4063 StructBuilder {
4064 arena: self.arena,
4065 segment_id: self.segment_id,
4066 cap_table: self.cap_table,
4067 data: struct_data,
4068 pointers: struct_pointers,
4069 data_size: self.struct_data_size,
4070 pointer_count: self.struct_pointer_count,
4071 }
4072 }
4073
4074 pub(crate) fn get_element_size(&self) -> ElementSize {
4075 self.element_size
4076 }
4077
4078 #[inline]
4079 pub fn get_pointer_element(self, index: ElementCount32) -> PointerBuilder<'a> {
4080 let offset = (u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64) as u32;
4081 PointerBuilder {
4082 arena: self.arena,
4083 segment_id: self.segment_id,
4084 cap_table: self.cap_table,
4085 pointer: unsafe { self.ptr.offset(offset as isize) } as *mut _,
4086 }
4087 }
4088
4089 pub(crate) fn as_raw_bytes(&self) -> &'a mut [u8] {
4090 if self.element_count == 0 {
4091 &mut []
4094 } else {
4095 let num_bytes = wire_helpers::round_bits_up_to_bytes(
4096 u64::from(self.step) * u64::from(self.element_count),
4097 ) as usize;
4098 unsafe { ::core::slice::from_raw_parts_mut(self.ptr, num_bytes) }
4099 }
4100 }
4101}
4102
/// An element type that can be stored in a Cap'n Proto primitive list.
pub trait PrimitiveElement {
    /// Reads the element at `index` from a list reader.
    fn get(list_reader: &ListReader, index: ElementCount32) -> Self;

    /// Reads the element at `index` from a list builder.
    fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self;

    /// Writes `value` into the list at `index`.
    fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self);

    /// The wire encoding used for a list of this element type.
    fn element_size() -> ElementSize;
}
4119
4120impl<T: Primitive> PrimitiveElement for T {
4121 #[inline]
4122 fn get(list_reader: &ListReader, index: ElementCount32) -> Self {
4123 let offset = (u64::from(index) * u64::from(list_reader.step) / BITS_PER_BYTE as u64) as u32;
4124 unsafe {
4125 let ptr: *const u8 = list_reader.ptr.offset(offset as isize);
4126 <Self as Primitive>::get(&*(ptr as *const <Self as Primitive>::Raw))
4127 }
4128 }
4129
4130 #[inline]
4131 fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self {
4132 let offset =
4133 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4134 unsafe {
4135 let ptr: *mut <Self as Primitive>::Raw =
4136 list_builder.ptr.offset(offset as isize) as *mut _;
4137 <Self as Primitive>::get(&*ptr)
4138 }
4139 }
4140
4141 #[inline]
4142 fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self) {
4143 let offset =
4144 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4145 unsafe {
4146 let ptr: *mut <Self as Primitive>::Raw =
4147 list_builder.ptr.offset(offset as isize) as *mut _;
4148 <Self as Primitive>::set(&mut *ptr, value);
4149 }
4150 }
4151
4152 fn element_size() -> ElementSize {
4153 match mem::size_of::<Self>() {
4154 0 => Void,
4155 1 => Byte,
4156 2 => TwoBytes,
4157 4 => FourBytes,
4158 8 => EightBytes,
4159 _ => unreachable!(),
4160 }
4161 }
4162}
4163
4164impl PrimitiveElement for bool {
4165 #[inline]
4166 fn get(list: &ListReader, index: ElementCount32) -> Self {
4167 let bindex = u64::from(index) * u64::from(list.step);
4168 unsafe {
4169 let b: *const u8 = list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize);
4170 ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0
4171 }
4172 }
4173 #[inline]
4174 fn get_from_builder(list: &ListBuilder, index: ElementCount32) -> Self {
4175 let bindex = u64::from(index) * u64::from(list.step);
4176 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4177 unsafe { ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0 }
4178 }
4179 #[inline]
4180 fn set(list: &ListBuilder, index: ElementCount32, value: Self) {
4181 let bindex = u64::from(index) * u64::from(list.step);
4182 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4183
4184 let bitnum = bindex % BITS_PER_BYTE as u64;
4185 unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
4186 }
4187 fn element_size() -> ElementSize {
4188 Bit
4189 }
4190}
4191
4192impl PrimitiveElement for () {
4193 #[inline]
4194 fn get(_list: &ListReader, _index: ElementCount32) {}
4195
4196 #[inline]
4197 fn get_from_builder(_list: &ListBuilder, _index: ElementCount32) {}
4198
4199 #[inline]
4200 fn set(_list: &ListBuilder, _index: ElementCount32, _value: ()) {}
4201
4202 fn element_size() -> ElementSize {
4203 Void
4204 }
4205}