use core::cell::Cell;
use core::mem;
use core::ptr;

use crate::data;
use crate::private::arena::{BuilderArena, NullArena, ReaderArena, SegmentId};
#[cfg(feature = "alloc")]
use crate::private::capability::ClientHook;
use crate::private::mask::Mask;
use crate::private::primitive::{Primitive, WireValue};
use crate::private::units::*;
use crate::private::zero;
use crate::text;
use crate::{Error, ErrorKind, MessageSize, Result};

pub use self::ElementSize::{
    Bit, Byte, EightBytes, FourBytes, InlineComposite, Pointer, TwoBytes, Void,
};

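/// Size of a single list element, as encoded in the low three bits of a list
/// pointer. `InlineComposite` elements are structs, stored after a tag word
/// that gives the element count and the per-element struct size.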
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ElementSize {
    Void = 0,
    Bit = 1,
    Byte = 2,
    TwoBytes = 3,
    FourBytes = 4,
    EightBytes = 5,
    Pointer = 6,
    InlineComposite = 7,
}

impl ElementSize {
    fn from(val: u8) -> Self {
        match val {
            0 => Self::Void,
            1 => Self::Bit,
            2 => Self::Byte,
            3 => Self::TwoBytes,
            4 => Self::FourBytes,
            5 => Self::EightBytes,
            6 => Self::Pointer,
            7 => Self::InlineComposite,
            _ => panic!("illegal element size: {val}"),
        }
    }
}
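
// Sanity check: each `ElementSize` discriminant maps back to itself through
// `ElementSize::from`.
#[test]
fn element_size_round_trip() {
    for code in 0..8u8 {
        assert_eq!(ElementSize::from(code) as u8, code);
    }
}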

pub fn data_bits_per_element(size: ElementSize) -> BitCount32 {
    match size {
        Void => 0,
        Bit => 1,
        Byte => 8,
        TwoBytes => 16,
        FourBytes => 32,
        EightBytes => 64,
        Pointer => 0,
        InlineComposite => 0,
    }
}

pub fn pointers_per_element(size: ElementSize) -> WirePointerCount32 {
    match size {
        Pointer => 1,
        _ => 0,
    }
}

#[derive(Clone, Copy, Debug)]
pub struct StructSize {
    pub data: WordCount16,
    pub pointers: WirePointerCount16,
}

impl StructSize {
    pub fn total(&self) -> WordCount32 {
        u32::from(self.data) + u32::from(self.pointers) * WORDS_PER_POINTER as WordCount32
    }
}
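
// A wire pointer occupies exactly one word (WORDS_PER_POINTER == 1), so a
// struct's total size is its data words plus one word per pointer.
#[test]
fn struct_size_total() {
    let size = StructSize {
        data: 2,
        pointers: 3,
    };
    assert_eq!(size.total(), 5);
}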

#[repr(u8)]
#[derive(Clone, Copy, PartialEq)]
pub enum WirePointerKind {
    Struct = 0,
    List = 1,
    Far = 2,
    Other = 3,
}

pub enum PointerType {
    Null,
    Struct,
    List,
    Capability,
}

impl WirePointerKind {
    fn from(val: u8) -> Self {
        match val {
            0 => Self::Struct,
            1 => Self::List,
            2 => Self::Far,
            3 => Self::Other,
            _ => panic!("illegal wire pointer kind: {val}"),
        }
    }
}

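/// A raw 64-bit Cap'n Proto wire pointer. The lower 32 bits hold the pointer
/// kind in the low two bits plus a signed offset in words; the upper 32 bits
/// are kind-specific: struct section sizes, list element size and count, far
/// pointer segment id, or capability index.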
#[repr(C)]
pub struct WirePointer {
    offset_and_kind: WireValue<u32>,
    upper32bits: WireValue<u32>,
}

#[test]
#[cfg(feature = "unaligned")]
fn wire_pointer_align() {
    assert_eq!(core::mem::align_of::<WirePointer>(), 1);
}

impl WirePointer {
    #[inline]
    pub fn kind(&self) -> WirePointerKind {
        WirePointerKind::from(self.offset_and_kind.get() as u8 & 3)
    }

    #[inline]
    pub fn is_positional(&self) -> bool {
        (self.offset_and_kind.get() & 2) == 0
    }

    #[inline]
    pub fn is_capability(&self) -> bool {
        self.offset_and_kind.get() == WirePointerKind::Other as u32
    }

    #[inline]
    pub unsafe fn target(ptr: *const Self) -> *const u8 {
        let this_addr: *const u8 = ptr as *const _;
        unsafe { this_addr.offset(8 * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize) }
    }

    #[inline]
    fn target_from_segment(
        ptr: *const Self,
        arena: &dyn ReaderArena,
        segment_id: u32,
    ) -> Result<*const u8> {
        let this_addr: *const u8 = ptr as *const _;
        unsafe {
            let offset = 1 + (((*ptr).offset_and_kind.get() as i32) >> 2);
            arena.check_offset(segment_id, this_addr, offset)
        }
    }

    #[inline]
    fn mut_target(ptr: *mut Self) -> *mut u8 {
        let this_addr: *mut u8 = ptr as *mut _;
        unsafe {
            this_addr.wrapping_offset(
                BYTES_PER_WORD as isize
                    * (1 + (((*ptr).offset_and_kind.get() as i32) >> 2)) as isize,
            )
        }
    }

    #[inline]
    pub fn set_kind_and_target(&mut self, kind: WirePointerKind, target: *mut u8) {
        let this_addr: isize = self as *const _ as isize;
        let target_addr: isize = target as *const _ as isize;
        self.offset_and_kind.set(
            ((((target_addr - this_addr) / BYTES_PER_WORD as isize) as i32 - 1) << 2) as u32
                | (kind as u32),
        )
    }

    #[inline]
    pub fn set_kind_with_zero_offset(&mut self, kind: WirePointerKind) {
        self.offset_and_kind.set(kind as u32)
    }

    #[inline]
    pub fn set_kind_and_target_for_empty_struct(&mut self) {
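        // 0xfffffffc is kind Struct (low bits 00) with word offset -1: the
        // pointer targets its own location, so a zero-sized struct consumes
        // no space in the segment.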
        self.offset_and_kind.set(0xfffffffc);
    }

    #[inline]
    pub fn inline_composite_list_element_count(&self) -> ElementCount32 {
        self.offset_and_kind.get() >> 2
    }

    #[inline]
    pub fn set_kind_and_inline_composite_list_element_count(
        &mut self,
        kind: WirePointerKind,
        element_count: ElementCount32,
    ) {
        self.offset_and_kind
            .set((element_count << 2) | (kind as u32))
    }

    #[inline]
    pub fn far_position_in_segment(&self) -> WordCount32 {
        self.offset_and_kind.get() >> 3
    }

    #[inline]
    pub fn is_double_far(&self) -> bool {
        ((self.offset_and_kind.get() >> 2) & 1) != 0
    }

    #[inline]
    pub fn set_far(&mut self, is_double_far: bool, pos: WordCount32) {
        self.offset_and_kind
            .set((pos << 3) | (u32::from(is_double_far) << 2) | WirePointerKind::Far as u32);
    }

    #[inline]
    pub fn set_cap(&mut self, index: u32) {
        self.offset_and_kind.set(WirePointerKind::Other as u32);
        self.upper32bits.set(index);
    }

    #[inline]
    pub fn struct_data_size(&self) -> WordCount16 {
        self.upper32bits.get() as WordCount16
    }

    #[inline]
    pub fn struct_ptr_count(&self) -> WordCount16 {
        (self.upper32bits.get() >> 16) as WordCount16
    }

    #[inline]
    pub fn struct_word_size(&self) -> WordCount32 {
        u32::from(self.struct_data_size())
            + u32::from(self.struct_ptr_count()) * WORDS_PER_POINTER as u32
    }

    #[inline]
    pub fn set_struct_size(&mut self, size: StructSize) {
        self.upper32bits
            .set(u32::from(size.data) | (u32::from(size.pointers) << 16))
    }

    #[inline]
    pub fn set_struct_size_from_pieces(&mut self, ds: WordCount16, rc: WirePointerCount16) {
        self.set_struct_size(StructSize {
            data: ds,
            pointers: rc,
        })
    }

    #[inline]
    pub fn list_element_size(&self) -> ElementSize {
        ElementSize::from(self.upper32bits.get() as u8 & 7)
    }

    #[inline]
    pub fn list_element_count(&self) -> ElementCount32 {
        self.upper32bits.get() >> 3
    }

    #[inline]
    pub fn list_inline_composite_word_count(&self) -> WordCount32 {
        self.list_element_count()
    }

    #[inline]
    pub fn set_list_size_and_count(&mut self, es: ElementSize, ec: ElementCount32) {
        assert!(ec < (1 << 29), "Lists are limited to 2**29 elements");
        self.upper32bits.set((ec << 3) | (es as u32));
    }

    #[inline]
    pub fn set_list_inline_composite(&mut self, wc: WordCount32) {
        assert!(
            wc < (1 << 29),
            "Inline composite lists are limited to 2**29 words"
        );
        self.upper32bits.set((wc << 3) | (InlineComposite as u32));
    }

    #[inline]
    pub fn far_segment_id(&self) -> SegmentId {
        self.upper32bits.get() as SegmentId
    }

    #[inline]
    pub fn set_far_segment_id(&mut self, si: SegmentId) {
        self.upper32bits.set(si)
    }

    #[inline]
    pub fn cap_index(&self) -> u32 {
        self.upper32bits.get()
    }

    #[inline]
    pub fn set_cap_index(&mut self, index: u32) {
        self.upper32bits.set(index)
    }

    #[inline]
    pub fn is_null(&self) -> bool {
        self.offset_and_kind.get() == 0 && self.upper32bits.get() == 0
    }
}
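
// Exercises the far-pointer accessors above, starting from an all-zero (null)
// pointer; on the wire a null pointer is eight zero bytes, so `mem::zeroed`
// is assumed here to be a valid `WirePointer` representation.
#[test]
fn far_pointer_bit_packing() {
    let mut p: WirePointer = unsafe { mem::zeroed() };
    assert!(p.is_null());
    p.set_far(true, 42);
    p.set_far_segment_id(7);
    assert!(p.kind() == WirePointerKind::Far);
    assert!(p.is_double_far());
    assert_eq!(p.far_position_in_segment(), 42);
    assert_eq!(p.far_segment_id(), 7);
}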

mod wire_helpers {
    use core::{ptr, slice};

    use crate::data;
    use crate::private::arena::*;
    #[cfg(feature = "alloc")]
    use crate::private::capability::ClientHook;
    use crate::private::layout::ElementSize::*;
    use crate::private::layout::{data_bits_per_element, pointers_per_element};
    use crate::private::layout::{CapTableBuilder, CapTableReader};
    use crate::private::layout::{
        ElementSize, ListBuilder, ListReader, StructBuilder, StructReader, StructSize, WirePointer,
        WirePointerKind,
    };
    use crate::private::units::*;
    use crate::text;
    use crate::{Error, ErrorKind, MessageSize, Result};

    pub struct SegmentAnd<T> {
        #[allow(dead_code)]
        segment_id: u32,
        pub value: T,
    }

    #[inline]
    pub fn round_bytes_up_to_words(bytes: ByteCount32) -> WordCount32 {
        (bytes + 7) / BYTES_PER_WORD as u32
    }

    #[inline]
    pub fn round_bits_up_to_words(bits: BitCount64) -> WordCount32 {
        ((bits + 63) / (BITS_PER_WORD as u64)) as WordCount32
    }

    #[allow(dead_code)]
    #[inline]
    pub fn round_bits_up_to_bytes(bits: BitCount64) -> ByteCount32 {
        ((bits + 7) / (BITS_PER_BYTE as u64)) as ByteCount32
    }
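
    // Spot checks for the rounding helpers: a word is 8 bytes (64 bits), and
    // partial words and bytes round up.
    #[test]
    fn rounding() {
        assert_eq!(round_bytes_up_to_words(0), 0);
        assert_eq!(round_bytes_up_to_words(8), 1);
        assert_eq!(round_bytes_up_to_words(9), 2);
        assert_eq!(round_bits_up_to_words(64), 1);
        assert_eq!(round_bits_up_to_words(65), 2);
        assert_eq!(round_bits_up_to_bytes(9), 2);
    }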

    #[inline]
    pub fn bounds_check(
        arena: &dyn ReaderArena,
        segment_id: u32,
        start: *const u8,
        size_in_words: usize,
        _kind: WirePointerKind,
    ) -> Result<()> {
        arena.contains_interval(segment_id, start, size_in_words)
    }

    #[inline]
    pub fn amplified_read(arena: &dyn ReaderArena, virtual_amount: u64) -> Result<()> {
        arena.amplified_read(virtual_amount)
    }

    /// Like `ptr::copy_nonoverlapping`, but a no-op when `count` is zero, so
    /// zero-length copies are safe even with dangling pointers.
    #[inline]
    pub unsafe fn copy_nonoverlapping_check_zero<T>(src: *const T, dst: *mut T, count: usize) {
        if count > 0 {
            ptr::copy_nonoverlapping(src, dst, count);
        }
    }

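    /// Allocates `amount` words for an object of the given `kind` and points
    /// `reff` at it, zeroing any object `reff` previously referenced. If the
    /// requested segment is full, the object goes in another segment and
    /// `reff` becomes a far pointer whose landing pad precedes the allocation.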
    #[inline]
    pub unsafe fn allocate(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        amount: WordCount32,
        kind: WirePointerKind,
    ) -> (*mut u8, *mut WirePointer, u32) {
        let is_null = (*reff).is_null();
        if !is_null {
            zero_object(arena, segment_id, reff)
        }

        if amount == 0 && kind == WirePointerKind::Struct {
            (*reff).set_kind_and_target_for_empty_struct();
            return (reff as *mut _, reff, segment_id);
        }

        match arena.allocate(segment_id, amount) {
            None => {
                // No space in the requested segment: allocate elsewhere, with
                // an extra word for the far-pointer landing pad.
                let amount_plus_ref = amount + POINTER_SIZE_IN_WORDS as u32;
                let (segment_id, word_idx) = arena.allocate_anywhere(amount_plus_ref);
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = seg_start.offset(word_idx as isize * BYTES_PER_WORD as isize);

                (*reff).set_far(false, word_idx);
                (*reff).set_far_segment_id(segment_id);

                let reff = ptr as *mut WirePointer;

                let ptr1 = ptr.add(BYTES_PER_WORD);
                (*reff).set_kind_and_target(kind, ptr1);
                (ptr1, reff, segment_id)
            }
            Some(idx) => {
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let ptr = (seg_start).offset(idx as isize * BYTES_PER_WORD as isize);
                (*reff).set_kind_and_target(kind, ptr);
                (ptr, reff, segment_id)
            }
        }
    }

    #[inline]
    pub unsafe fn follow_builder_fars(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        ref_target: *mut u8,
        segment_id: u32,
    ) -> Result<(*mut u8, *mut WirePointer, u32)> {
        // If `reff` is a far pointer, follow it to the object it lands on;
        // otherwise the object is `ref_target` in the current segment.
        if (*reff).kind() == WirePointerKind::Far {
            let segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
            let pad: *mut WirePointer =
                (seg_start as *mut WirePointer).offset((*reff).far_position_in_segment() as isize);
            if !(*reff).is_double_far() {
                Ok((WirePointer::mut_target(pad), pad, segment_id))
            } else {
                // Double-far: the landing pad is a far pointer to the object,
                // followed by a tag word holding the object's metadata.
                let reff = pad.offset(1);

                let segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment_mut(segment_id);
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, reff, segment_id))
            }
        } else {
            Ok((ref_target, reff, segment_id))
        }
    }

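    /// Reader-side counterpart of `follow_builder_fars`: resolves far and
    /// double-far pointers with bounds checking, returning the object's
    /// content pointer, the wire pointer holding its metadata, and the
    /// segment id it lives in.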
    #[inline]
    pub unsafe fn follow_fars(
        arena: &dyn ReaderArena,
        reff: *const WirePointer,
        segment_id: u32,
    ) -> Result<(*const u8, *const WirePointer, u32)> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();

            let (seg_start, _seg_len) = arena.get_segment(far_segment_id)?;
            let ptr = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);

            let pad_words: usize = if (*reff).is_double_far() { 2 } else { 1 };
            bounds_check(arena, far_segment_id, ptr, pad_words, WirePointerKind::Far)?;

            let pad: *const WirePointer = ptr as *const _;

            if !(*reff).is_double_far() {
                Ok((
                    WirePointer::target_from_segment(pad, arena, far_segment_id)?,
                    pad,
                    far_segment_id,
                ))
            } else {
                // Double-far: the tag word after the pad describes the object.
                let tag = pad.offset(1);
                let double_far_segment_id = (*pad).far_segment_id();
                let (segment_start, _segment_len) = arena.get_segment(double_far_segment_id)?;
                let ptr = segment_start
                    .offset((*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
                Ok((ptr, tag, double_far_segment_id))
            }
        } else {
            Ok((
                WirePointer::target_from_segment(reff, arena, segment_id)?,
                reff,
                segment_id,
            ))
        }
    }

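    /// Zeroes out the object referenced by `reff`, along with everything it
    /// transitively points to, including any far-pointer landing pads.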
    pub unsafe fn zero_object(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        reff: *mut WirePointer,
    ) {
        match (*reff).kind() {
            WirePointerKind::Struct | WirePointerKind::List | WirePointerKind::Other => {
                zero_object_helper(arena, segment_id, reff, WirePointer::mut_target(reff))
            }
            WirePointerKind::Far => {
                let segment_id = (*reff).far_segment_id();
                let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                let pad: *mut WirePointer = (seg_start as *mut WirePointer)
                    .offset((*reff).far_position_in_segment() as isize);

                if (*reff).is_double_far() {
                    let segment_id = (*pad).far_segment_id();

                    let (seg_start, _seg_len) = arena.get_segment_mut(segment_id);
                    let ptr = seg_start.offset(
                        (*pad).far_position_in_segment() as isize * BYTES_PER_WORD as isize,
                    );
                    zero_object_helper(arena, segment_id, pad.offset(1), ptr);

                    ptr::write_bytes(pad, 0u8, 2);
                } else {
                    zero_object(arena, segment_id, pad);
                    ptr::write_bytes(pad, 0u8, 1);
                }
            }
        }
    }

    pub unsafe fn zero_object_helper(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        tag: *mut WirePointer,
        ptr: *mut u8,
    ) {
        match (*tag).kind() {
            WirePointerKind::Other => {
                panic!("Don't know how to handle OTHER")
            }
            WirePointerKind::Struct => {
                let pointer_section: *mut WirePointer = ptr
                    .offset((*tag).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *mut _;

                let count = (*tag).struct_ptr_count() as isize;
                for i in 0..count {
                    zero_object(arena, segment_id, pointer_section.offset(i));
                }
                ptr::write_bytes(
                    ptr,
                    0u8,
                    (*tag).struct_word_size() as usize * BYTES_PER_WORD,
                );
            }
            WirePointerKind::List => match (*tag).list_element_size() {
                Void => {}
                Bit | Byte | TwoBytes | FourBytes | EightBytes => ptr::write_bytes(
                    ptr,
                    0u8,
                    BYTES_PER_WORD
                        * round_bits_up_to_words(
                            u64::from((*tag).list_element_count())
                                * u64::from(data_bits_per_element((*tag).list_element_size())),
                        ) as usize,
                ),
                Pointer => {
                    let count = (*tag).list_element_count() as usize;
                    for i in 0..count as isize {
                        zero_object(
                            arena,
                            segment_id,
                            ptr.offset(i * BYTES_PER_WORD as isize) as *mut _,
                        );
                    }
                    ptr::write_bytes(ptr, 0u8, count * BYTES_PER_WORD);
                }
                InlineComposite => {
                    let element_tag: *mut WirePointer = ptr as *mut _;

                    assert!(
                        (*element_tag).kind() == WirePointerKind::Struct,
                        "Don't know how to handle non-STRUCT inline composite"
                    );

                    let data_size = (*element_tag).struct_data_size();
                    let pointer_count = (*element_tag).struct_ptr_count();
                    let mut pos = ptr.add(BYTES_PER_WORD);
                    let count = (*element_tag).inline_composite_list_element_count();
                    if pointer_count > 0 {
                        for _ in 0..count {
                            pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);
                            for _ in 0..pointer_count {
                                zero_object(arena, segment_id, pos as *mut WirePointer);
                                pos = pos.add(BYTES_PER_WORD);
                            }
                        }
                    }
                    ptr::write_bytes(
                        ptr,
                        0u8,
                        BYTES_PER_WORD * ((*element_tag).struct_word_size() * count + 1) as usize,
                    );
                }
            },
            WirePointerKind::Far => {
                panic!("Unexpected FAR pointer")
            }
        }
    }

    #[inline]
    pub unsafe fn zero_pointer_and_fars(
        arena: &mut dyn BuilderArena,
        _segment_id: u32,
        reff: *mut WirePointer,
    ) -> Result<()> {
        if (*reff).kind() == WirePointerKind::Far {
            let far_segment_id = (*reff).far_segment_id();
            let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
            let pad = seg_start
                .offset((*reff).far_position_in_segment() as isize * BYTES_PER_WORD as isize);
            let num_elements = if (*reff).is_double_far() { 2 } else { 1 };
            ptr::write_bytes(pad, 0, num_elements * BYTES_PER_WORD);
        }
        ptr::write_bytes(reff, 0, 1);
        Ok(())
    }

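    /// Recursively computes the size of the object tree rooted at `reff`,
    /// counting words and capabilities and enforcing `nesting_limit`.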
    pub unsafe fn total_size(
        arena: &dyn ReaderArena,
        segment_id: u32,
        reff: *const WirePointer,
        mut nesting_limit: i32,
    ) -> Result<MessageSize> {
        let mut result = MessageSize {
            word_count: 0,
            cap_count: 0,
        };

        if (*reff).is_null() {
            return Ok(result);
        };

        if nesting_limit <= 0 {
            return Err(Error::from_kind(ErrorKind::MessageIsTooDeeplyNested));
        }

        nesting_limit -= 1;

        let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;

        match (*reff).kind() {
            WirePointerKind::Struct => {
                bounds_check(
                    arena,
                    segment_id,
                    ptr,
                    (*reff).struct_word_size() as usize,
                    WirePointerKind::Struct,
                )?;
                result.word_count += u64::from((*reff).struct_word_size());

                let pointer_section: *const WirePointer = ptr
                    .offset((*reff).struct_data_size() as isize * BYTES_PER_WORD as isize)
                    as *const _;
                let count: isize = (*reff).struct_ptr_count() as isize;
                for i in 0..count {
                    result +=
                        total_size(arena, segment_id, pointer_section.offset(i), nesting_limit)?;
                }
            }
            WirePointerKind::List => {
                match (*reff).list_element_size() {
                    Void => {}
                    Bit | Byte | TwoBytes | FourBytes | EightBytes => {
                        let total_words = round_bits_up_to_words(
                            u64::from((*reff).list_element_count())
                                * u64::from(data_bits_per_element((*reff).list_element_size())),
                        );
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            total_words as usize,
                            WirePointerKind::List,
                        )?;
                        result.word_count += u64::from(total_words);
                    }
                    Pointer => {
                        let count = (*reff).list_element_count();
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            count as usize * WORDS_PER_POINTER,
                            WirePointerKind::List,
                        )?;

                        result.word_count += u64::from(count) * WORDS_PER_POINTER as u64;

                        for i in 0..count as isize {
                            result += total_size(
                                arena,
                                segment_id,
                                (ptr as *const WirePointer).offset(i),
                                nesting_limit,
                            )?;
                        }
                    }
                    InlineComposite => {
                        let word_count = (*reff).list_inline_composite_word_count();
                        bounds_check(
                            arena,
                            segment_id,
                            ptr,
                            word_count as usize + POINTER_SIZE_IN_WORDS,
                            WirePointerKind::List,
                        )?;

                        let element_tag: *const WirePointer = ptr as *const _;
                        let count = (*element_tag).inline_composite_list_element_count();

                        if (*element_tag).kind() != WirePointerKind::Struct {
                            return Err(Error::from_kind(
                                ErrorKind::CantHandleNonStructInlineComposite,
                            ));
                        }

                        let actual_size =
                            u64::from((*element_tag).struct_word_size()) * u64::from(count);
                        if actual_size > u64::from(word_count) {
                            return Err(Error::from_kind(
                                ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
                            ));
                        }

                        result.word_count += actual_size + POINTER_SIZE_IN_WORDS as u64;

                        let data_size = (*element_tag).struct_data_size();
                        let pointer_count = (*element_tag).struct_ptr_count();

                        if pointer_count > 0 {
                            let mut pos = ptr.add(BYTES_PER_WORD);
                            for _ in 0..count {
                                pos = pos.offset(data_size as isize * BYTES_PER_WORD as isize);

                                for _ in 0..pointer_count {
                                    result += total_size(
                                        arena,
                                        segment_id,
                                        pos as *const WirePointer,
                                        nesting_limit,
                                    )?;
                                    pos = pos.add(BYTES_PER_WORD);
                                }
                            }
                        }
                    }
                }
            }
            WirePointerKind::Far => {
                return Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer));
            }
            WirePointerKind::Other => {
                if (*reff).is_capability() {
                    result.cap_count += 1;
                } else {
                    return Err(Error::from_kind(ErrorKind::UnknownPointerType));
                }
            }
        }

        Ok(result)
    }

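    /// Copies a struct's data section, then deep-copies each of its pointer
    /// fields via `copy_message`.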
    unsafe fn copy_struct(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut u8,
        src: *const u8,
        data_size: isize,
        pointer_count: isize,
    ) {
        copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);

        let src_refs: *const WirePointer = (src as *const WirePointer).offset(data_size);
        let dst_refs: *mut WirePointer = (dst as *mut WirePointer).offset(data_size);

        for ii in 0..pointer_count {
            copy_message(
                arena,
                segment_id,
                cap_table,
                dst_refs.offset(ii),
                src_refs.offset(ii),
            );
        }
    }

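    /// Deep-copies the object rooted at `src` into `dst`, allocating space in
    /// the destination message. `src` must come from a trusted message (such
    /// as a compiled-in default value): no bounds checking is performed, and
    /// far or other pointers cause a panic.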
    pub unsafe fn copy_message(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        dst: *mut WirePointer,
        src: *const WirePointer,
    ) -> (*mut u8, *mut WirePointer, u32) {
        match (*src).kind() {
            WirePointerKind::Struct => {
                if (*src).is_null() {
                    ptr::write_bytes(dst, 0, 1);
                    (ptr::null_mut(), dst, segment_id)
                } else {
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).struct_word_size(),
                        WirePointerKind::Struct,
                    );
                    copy_struct(
                        arena,
                        segment_id,
                        cap_table,
                        dst_ptr,
                        src_ptr,
                        (*src).struct_data_size() as isize,
                        (*src).struct_ptr_count() as isize,
                    );
                    (*dst).set_struct_size_from_pieces(
                        (*src).struct_data_size(),
                        (*src).struct_ptr_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }
            }
            WirePointerKind::List => match (*src).list_element_size() {
                ElementSize::Void
                | ElementSize::Bit
                | ElementSize::Byte
                | ElementSize::TwoBytes
                | ElementSize::FourBytes
                | ElementSize::EightBytes => {
                    let word_count = round_bits_up_to_words(
                        u64::from((*src).list_element_count())
                            * u64::from(data_bits_per_element((*src).list_element_size())),
                    );
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) =
                        allocate(arena, dst, segment_id, word_count, WirePointerKind::List);
                    copy_nonoverlapping_check_zero(
                        src_ptr,
                        dst_ptr,
                        word_count as usize * BYTES_PER_WORD,
                    );
                    (*dst).set_list_size_and_count(
                        (*src).list_element_size(),
                        (*src).list_element_count(),
                    );
                    (dst_ptr, dst, segment_id)
                }

                ElementSize::Pointer => {
                    let src_refs: *const WirePointer = WirePointer::target(src) as _;
                    let (dst_refs, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_element_count(),
                        WirePointerKind::List,
                    );
                    for ii in 0..((*src).list_element_count() as isize) {
                        copy_message(
                            arena,
                            segment_id,
                            cap_table,
                            dst_refs.offset(ii * BYTES_PER_WORD as isize) as *mut WirePointer,
                            src_refs.offset(ii),
                        );
                    }
                    (*dst)
                        .set_list_size_and_count(ElementSize::Pointer, (*src).list_element_count());
                    (dst_refs, dst, segment_id)
                }
                ElementSize::InlineComposite => {
                    let src_ptr = WirePointer::target(src);
                    let (dst_ptr, dst, segment_id) = allocate(
                        arena,
                        dst,
                        segment_id,
                        (*src).list_inline_composite_word_count() + 1,
                        WirePointerKind::List,
                    );

                    (*dst).set_list_inline_composite((*src).list_inline_composite_word_count());

                    let src_tag: *const WirePointer = src_ptr as _;
                    ptr::copy_nonoverlapping(src_tag, dst_ptr as *mut WirePointer, 1);

                    let mut src_element = src_ptr.add(BYTES_PER_WORD);
                    let mut dst_element = dst_ptr.add(BYTES_PER_WORD);

                    if (*src_tag).kind() != WirePointerKind::Struct {
                        panic!("unsupported INLINE_COMPOSITE list");
                    }
                    for _ in 0..(*src_tag).inline_composite_list_element_count() {
                        copy_struct(
                            arena,
                            segment_id,
                            cap_table,
                            dst_element,
                            src_element,
                            (*src_tag).struct_data_size() as isize,
                            (*src_tag).struct_ptr_count() as isize,
                        );
                        src_element = src_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                        dst_element = dst_element.offset(
                            BYTES_PER_WORD as isize * (*src_tag).struct_word_size() as isize,
                        );
                    }
                    (dst_ptr, dst, segment_id)
                }
            },
            WirePointerKind::Other => {
                panic!("Unchecked message contained an OTHER pointer.")
            }
            WirePointerKind::Far => {
                panic!("Unchecked message contained a far pointer.")
            }
        }
    }

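    /// Transfers ownership of the object referenced by `src` to `dst`, which
    /// must be null. No object data is copied: if the two pointers live in
    /// different segments, `dst` is encoded as a far pointer to a landing pad.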
    pub unsafe fn transfer_pointer(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src: *mut WirePointer,
    ) {
        assert!((*dst).is_null());
        if (*src).is_null() {
            ptr::write_bytes(dst, 0, 1);
        } else if (*src).is_positional() {
            transfer_pointer_split(
                arena,
                dst_segment_id,
                dst,
                src_segment_id,
                src,
                WirePointer::mut_target(src),
            );
        } else {
            ptr::copy_nonoverlapping(src, dst, 1);
        }
    }

    pub unsafe fn transfer_pointer_split(
        arena: &mut dyn BuilderArena,
        dst_segment_id: u32,
        dst: *mut WirePointer,
        src_segment_id: u32,
        src_tag: *mut WirePointer,
        src_ptr: *mut u8,
    ) {
        if dst_segment_id == src_segment_id {
            // Same segment: encode a direct positional pointer.
            if (*src_tag).kind() == WirePointerKind::Struct && (*src_tag).struct_word_size() == 0 {
                (*dst).set_kind_and_target_for_empty_struct();
            } else {
                (*dst).set_kind_and_target((*src_tag).kind(), src_ptr);
            }
            ptr::copy_nonoverlapping(&(*src_tag).upper32bits, &mut (*dst).upper32bits, 1);
        } else {
            match arena.allocate(src_segment_id, 1) {
                None => {
                    // The source segment has no room for a landing pad, so
                    // allocate a double-far pad in some other segment.
                    let (far_segment_id, word_idx) = arena.allocate_anywhere(2);
                    let (seg_start, _seg_len) = arena.get_segment_mut(far_segment_id);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(word_idx as isize);

                    let (src_seg_start, _seg_len) = arena.get_segment_mut(src_segment_id);

                    (*landing_pad).set_far(
                        false,
                        ((src_ptr as usize - src_seg_start as usize) / BYTES_PER_WORD) as u32,
                    );
                    (*landing_pad).set_far_segment_id(src_segment_id);

                    let landing_pad1 = landing_pad.offset(1);
                    (*landing_pad1).set_kind_with_zero_offset((*src_tag).kind());

                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad1).upper32bits,
                        1,
                    );

                    (*dst).set_far(true, word_idx);
                    (*dst).set_far_segment_id(far_segment_id);
                }
                Some(landing_pad_word) => {
                    // Allocate a landing pad in the source segment and point
                    // `dst` at it through a single far pointer.
                    let (seg_start, seg_len) = arena.get_segment_mut(src_segment_id);
                    assert!(landing_pad_word < seg_len);
                    let landing_pad: *mut WirePointer =
                        (seg_start as *mut WirePointer).offset(landing_pad_word as isize);
                    (*landing_pad).set_kind_and_target((*src_tag).kind(), src_ptr);
                    ptr::copy_nonoverlapping(
                        &(*src_tag).upper32bits,
                        &mut (*landing_pad).upper32bits,
                        1,
                    );

                    (*dst).set_far(false, landing_pad_word);
                    (*dst).set_far_segment_id(src_segment_id);
                }
            }
        }
    }

    #[inline]
    pub unsafe fn init_struct_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
    ) -> StructBuilder<'_> {
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            size.total(),
            WirePointerKind::Struct,
        );
        (*reff).set_struct_size(size);

        StructBuilder {
            arena,
            segment_id,
            cap_table,
            data: ptr as *mut _,
            pointers: ptr.offset((size.data as usize) as isize * BYTES_PER_WORD as isize) as *mut _,
            data_size: u32::from(size.data) * (BITS_PER_WORD as BitCount32),
            pointer_count: size.pointers,
        }
    }

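    /// Returns a builder for the struct referenced by `reff`, initializing it
    /// from `default` if null. If the existing struct is smaller than `size`,
    /// it is reallocated at the larger size and its contents are transferred.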
    #[inline]
    pub unsafe fn get_writable_struct_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        cap_table: CapTableBuilder,
        size: StructSize,
        default: Option<&'a [crate::Word]>,
    ) -> Result<StructBuilder<'a>> {
        let mut ref_target = WirePointer::mut_target(reff);

        if (*reff).is_null() {
            match default {
                None => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
                    return Ok(init_struct_pointer(
                        arena, reff, segment_id, cap_table, size,
                    ))
                }
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        cap_table,
                        reff,
                        d.as_ptr() as *const WirePointer,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    ref_target = new_ref_target;
                }
            }
        }

        let (old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, reff, ref_target, segment_id)?;
        if (*old_ref).kind() != WirePointerKind::Struct {
            return Err(Error::from_kind(
                ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
            ));
        }

        let old_data_size = (*old_ref).struct_data_size();
        let old_pointer_count = (*old_ref).struct_ptr_count();
        let old_pointer_section: *mut WirePointer =
            old_ptr.offset(old_data_size as isize * BYTES_PER_WORD as isize) as *mut _;

        if old_data_size < size.data || old_pointer_count < size.pointers {
            // The existing struct is too small: allocate a larger copy,
            // transfer the contents, and zero out the old space.
            let new_data_size = ::core::cmp::max(old_data_size, size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, size.pointers);
            let total_size =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;

            zero_pointer_and_fars(arena, segment_id, reff)?;

            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
            (*reff).set_struct_size_from_pieces(new_data_size, new_pointer_count);

            copy_nonoverlapping_check_zero(old_ptr, ptr, old_data_size as usize * BYTES_PER_WORD);

            let new_pointer_section: *mut WirePointer =
                ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize) as *mut _;
            for i in 0..old_pointer_count as isize {
                transfer_pointer(
                    arena,
                    segment_id,
                    new_pointer_section.offset(i),
                    old_segment_id,
                    old_pointer_section.offset(i),
                );
            }

            ptr::write_bytes(
                old_ptr,
                0,
                (old_data_size as usize + old_pointer_count as usize) * BYTES_PER_WORD,
            );

            Ok(StructBuilder {
                arena,
                segment_id,
                cap_table,
                data: ptr as *mut _,
                pointers: new_pointer_section,
                data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                pointer_count: new_pointer_count,
            })
        } else {
            Ok(StructBuilder {
                arena,
                segment_id: old_segment_id,
                cap_table,
                data: old_ptr,
                pointers: old_pointer_section,
                data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                pointer_count: old_pointer_count,
            })
        }
    }

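    /// Initializes `reff` as a list of `element_count` primitive or pointer
    /// elements. The `step` stored in the builder is the distance between
    /// consecutive elements, in bits.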
    #[inline]
    pub unsafe fn init_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: ElementSize,
    ) -> ListBuilder<'_> {
        assert!(
            element_size != InlineComposite,
            "Should have called initStructListPointer() instead"
        );

        let data_size = data_bits_per_element(element_size);
        let pointer_count = pointers_per_element(element_size);
        let step = data_size + pointer_count * BITS_PER_POINTER as u32;
        let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, word_count, WirePointerKind::List);

        (*reff).set_list_size_and_count(element_size, element_count);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr,
            step,
            element_count,
            element_size,
            struct_data_size: data_size,
            struct_pointer_count: pointer_count as u16,
        }
    }

    #[inline]
    pub unsafe fn init_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        cap_table: CapTableBuilder,
        element_count: ElementCount32,
        element_size: StructSize,
    ) -> ListBuilder<'_> {
        let words_per_element = element_size.total();

        let word_count: WordCount32 = element_count * words_per_element;
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            POINTER_SIZE_IN_WORDS as u32 + word_count,
            WirePointerKind::List,
        );
        let ptr = ptr as *mut WirePointer;

        (*reff).set_list_inline_composite(word_count);
        (*ptr).set_kind_and_inline_composite_list_element_count(
            WirePointerKind::Struct,
            element_count,
        );
        (*ptr).set_struct_size(element_size);

        let ptr1 = ptr.add(POINTER_SIZE_IN_WORDS);

        ListBuilder {
            arena,
            segment_id,
            cap_table,
            ptr: ptr1 as *mut _,
            step: words_per_element * BITS_PER_WORD as u32,
            element_count,
            element_size: ElementSize::InlineComposite,
            struct_data_size: u32::from(element_size.data) * (BITS_PER_WORD as u32),
            struct_pointer_count: element_size.pointers,
        }
    }

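    /// Returns a builder for the existing list at `orig_ref` (initializing it
    /// from `default_value` if null), checking that the stored element size is
    /// compatible with `element_size`. An inline-composite list is accepted
    /// whenever each element carries the expected data and pointer sections.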
    #[inline]
    pub unsafe fn get_writable_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: ElementSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        assert!(
            element_size != InlineComposite,
            "Use get_writable_struct_list_pointer() for struct lists"
        );

        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        let (mut ptr, reff, segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*reff).list_element_size();

        if old_size == InlineComposite {
            let tag: *const WirePointer = ptr as *const _;

            if (*tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            ptr = ptr.add(BYTES_PER_WORD);

            let data_size = (*tag).struct_data_size();
            let pointer_count = (*tag).struct_ptr_count();

            match element_size {
                Void => {}
                Bit => {
                    return Err(Error::from_kind(
                        ErrorKind::FoundStructListWhereBitListWasExpected,
                    ));
                }
                Byte | TwoBytes | FourBytes | EightBytes => {
                    if data_size < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                }
                Pointer => {
                    if pointer_count < 1 {
                        return Err(Error::from_kind(
                            ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                        ));
                    }
                    ptr = ptr.offset(data_size as isize * BYTES_PER_WORD as isize);
                }
                InlineComposite => {
                    unreachable!()
                }
            }
            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                element_count: (*tag).inline_composite_list_element_count(),
                element_size: ElementSize::InlineComposite,
                step: (*tag).struct_word_size() * BITS_PER_WORD as u32,
                struct_data_size: u32::from(data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: pointer_count,
            })
        } else {
            let data_size = data_bits_per_element(old_size);
            let pointer_count = pointers_per_element(old_size);

            if data_size < data_bits_per_element(element_size)
                || pointer_count < pointers_per_element(element_size)
            {
                return Err(Error::from_kind(
                    ErrorKind::ExistingListValueIsIncompatibleWithExpectedType,
                ));
            }

            let step = data_size + pointer_count * BITS_PER_POINTER as u32;

            Ok(ListBuilder {
                arena,
                segment_id,
                cap_table,
                ptr: ptr as *mut _,
                step,
                element_count: (*reff).list_element_count(),
                element_size: old_size,
                struct_data_size: data_size,
                struct_pointer_count: pointer_count as u16,
            })
        }
    }

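    /// Like `get_writable_list_pointer`, but for struct lists: if the existing
    /// elements are smaller than `element_size`, the list is reallocated as an
    /// inline-composite list with upgraded elements and the old copy is zeroed.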
    #[inline]
    pub unsafe fn get_writable_struct_list_pointer(
        arena: &mut dyn BuilderArena,
        mut orig_ref: *mut WirePointer,
        mut orig_segment_id: u32,
        cap_table: CapTableBuilder,
        element_size: StructSize,
        default_value: *const u8,
    ) -> Result<ListBuilder<'_>> {
        let mut orig_ref_target = WirePointer::mut_target(orig_ref);

        if (*orig_ref).is_null() {
            if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
                return Ok(ListBuilder::new_default(arena));
            }
            let (new_orig_ref_target, new_orig_ref, new_orig_segment_id) = copy_message(
                arena,
                orig_segment_id,
                cap_table,
                orig_ref,
                default_value as *const WirePointer,
            );
            orig_ref_target = new_orig_ref_target;
            orig_ref = new_orig_ref;
            orig_segment_id = new_orig_segment_id;
        }

        let (mut old_ptr, old_ref, old_segment_id) =
            follow_builder_fars(arena, orig_ref, orig_ref_target, orig_segment_id)?;

        if (*old_ref).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }

        let old_size = (*old_ref).list_element_size();

        if old_size == InlineComposite {
            let old_tag: *const WirePointer = old_ptr as *const _;
            old_ptr = old_ptr.add(BYTES_PER_WORD);
            if (*old_tag).kind() != WirePointerKind::Struct {
                return Err(Error::from_kind(
                    ErrorKind::InlineCompositeListWithNonStructElementsNotSupported,
                ));
            }

            let old_data_size = (*old_tag).struct_data_size();
            let old_pointer_count = (*old_tag).struct_ptr_count();
            let old_step =
                u32::from(old_data_size) + u32::from(old_pointer_count) * WORDS_PER_POINTER as u32;
            let element_count = (*old_tag).inline_composite_list_element_count();

            if old_data_size >= element_size.data && old_pointer_count >= element_size.pointers {
                // The existing elements are already big enough.
                return Ok(ListBuilder {
                    arena,
                    segment_id: old_segment_id,
                    cap_table,
                    ptr: old_ptr as *mut _,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: old_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(old_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: old_pointer_count,
                });
            }

            // Upgrade the elements: allocate a new list with the larger
            // element size and move each element over.
            let new_data_size = ::core::cmp::max(old_data_size, element_size.data);
            let new_pointer_count = ::core::cmp::max(old_pointer_count, element_size.pointers);
            let new_step =
                u32::from(new_data_size) + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;
            let total_size = new_step * element_count;

            zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

            let (mut new_ptr, new_ref, new_segment_id) = allocate(
                arena,
                orig_ref,
                orig_segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*new_ref).set_list_inline_composite(total_size);

            let new_tag: *mut WirePointer = new_ptr as *mut _;
            (*new_tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                element_count,
            );
            (*new_tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
            new_ptr = new_ptr.add(BYTES_PER_WORD);

            let mut src = old_ptr as *mut WirePointer;
            let mut dst = new_ptr as *mut WirePointer;
            for _ in 0..element_count {
                copy_nonoverlapping_check_zero(src, dst, old_data_size as usize);

                let new_pointer_section = dst.offset(new_data_size as isize);
                let old_pointer_section = src.offset(old_data_size as isize);
                for jj in 0..(old_pointer_count as isize) {
                    transfer_pointer(
                        arena,
                        new_segment_id,
                        new_pointer_section.offset(jj),
                        old_segment_id,
                        old_pointer_section.offset(jj),
                    );
                }

                dst = dst.offset(new_step as isize);
                src = src.offset(old_step as isize);
            }

            // Zero out the old list, including its tag word.
            ptr::write_bytes(
                old_ptr.offset(-(BYTES_PER_WORD as isize)),
                0,
                (u64::from(old_step) * u64::from(element_count)) as usize * BYTES_PER_WORD,
            );

            Ok(ListBuilder {
                arena,
                segment_id: new_segment_id,
                cap_table,
                ptr: new_ptr,
                element_count,
                element_size: ElementSize::InlineComposite,
                step: new_step * BITS_PER_WORD as u32,
                struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                struct_pointer_count: new_pointer_count,
            })
        } else {
            let old_data_size = data_bits_per_element(old_size);
            let old_pointer_count = pointers_per_element(old_size);
            let old_step = old_data_size + old_pointer_count * BITS_PER_POINTER as u32;
            let element_count = (*old_ref).list_element_count();

            if old_size == ElementSize::Void {
                // A list of Void can simply be replaced by a fresh struct list.
                Ok(init_struct_list_pointer(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    cap_table,
                    element_count,
                    element_size,
                ))
            } else {
                if old_size == ElementSize::Bit {
                    return Err(Error::from_kind(
                        ErrorKind::FoundBitListWhereStructListWasExpected,
                    ));
                }

                let mut new_data_size = element_size.data;
                let mut new_pointer_count = element_size.pointers;

                if old_size == ElementSize::Pointer {
                    new_pointer_count = ::core::cmp::max(new_pointer_count, 1);
                } else {
                    new_data_size = ::core::cmp::max(new_data_size, 1);
                }

                let new_step = u32::from(new_data_size)
                    + u32::from(new_pointer_count) * WORDS_PER_POINTER as u32;
                let total_words = element_count * new_step;

                zero_pointer_and_fars(arena, orig_segment_id, orig_ref)?;

                let (mut new_ptr, new_ref, new_segment_id) = allocate(
                    arena,
                    orig_ref,
                    orig_segment_id,
                    total_words + POINTER_SIZE_IN_WORDS as u32,
                    WirePointerKind::List,
                );
                (*new_ref).set_list_inline_composite(total_words);

                let tag: *mut WirePointer = new_ptr as *mut _;
                (*tag).set_kind_and_inline_composite_list_element_count(
                    WirePointerKind::Struct,
                    element_count,
                );
                (*tag).set_struct_size_from_pieces(new_data_size, new_pointer_count);
                new_ptr = new_ptr.add(BYTES_PER_WORD);

                if old_size == ElementSize::Pointer {
                    let mut dst = new_ptr.offset(new_data_size as isize * BYTES_PER_WORD as isize);
                    let mut src: *mut WirePointer = old_ptr as *mut _;
                    for _ in 0..element_count {
                        transfer_pointer(arena, new_segment_id, dst as *mut _, old_segment_id, src);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                        src = src.offset(1);
                    }
                } else {
                    let mut dst = new_ptr;
                    let mut src: *mut u8 = old_ptr;
                    let old_byte_step = old_data_size / BITS_PER_BYTE as u32;
                    for _ in 0..element_count {
                        copy_nonoverlapping_check_zero(src, dst, old_byte_step as usize);
                        src = src.offset(old_byte_step as isize);
                        dst = dst.offset(new_step as isize * BYTES_PER_WORD as isize);
                    }
                }

                // Zero out the old list.
                ptr::write_bytes(
                    old_ptr,
                    0,
                    round_bits_up_to_bytes(u64::from(old_step) * u64::from(element_count)) as usize,
                );

                Ok(ListBuilder {
                    arena,
                    segment_id: new_segment_id,
                    cap_table,
                    ptr: new_ptr,
                    element_count,
                    element_size: ElementSize::InlineComposite,
                    step: new_step * BITS_PER_WORD as u32,
                    struct_data_size: u32::from(new_data_size) * BITS_PER_WORD as u32,
                    struct_pointer_count: new_pointer_count,
                })
            }
        }
    }

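    /// Allocates space for a text blob of `size` bytes plus a NUL terminator,
    /// encoded on the wire as a byte list.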
    #[inline]
    pub unsafe fn init_text_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        size: ByteCount32,
    ) -> SegmentAnd<text::Builder<'_>> {
        // One extra byte for the NUL terminator.
        let byte_size = size + 1;

        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            round_bytes_up_to_words(byte_size),
            WirePointerKind::List,
        );

        (*reff).set_list_size_and_count(Byte, byte_size);

        SegmentAnd {
            segment_id,
            value: text::Builder::new(slice::from_raw_parts_mut(ptr, size as usize)),
        }
    }

    #[inline]
    pub unsafe fn set_text_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        value: crate::text::Reader<'_>,
    ) -> SegmentAnd<text::Builder<'a>> {
        let value_bytes = value.as_bytes();
        let mut allocation = init_text_pointer(arena, reff, segment_id, value_bytes.len() as u32);
        allocation
            .value
            .reborrow()
            .as_bytes_mut()
            .copy_from_slice(value_bytes);
        allocation
    }

    #[inline]
    pub unsafe fn get_writable_text_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<text::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(text::Builder::new(&mut [])),
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        let count = (*reff).list_element_count();
        if count == 0 || *ptr.offset((count - 1) as isize) != 0 {
            return Err(Error::from_kind(ErrorKind::TextBlobMissingNULTerminator));
        }

        Ok(text::Builder::with_pos(
            slice::from_raw_parts_mut(ptr, (count - 1) as usize),
            (count - 1) as usize,
        ))
    }

    #[inline]
    pub unsafe fn init_data_pointer(
        arena: &mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        size: ByteCount32,
    ) -> SegmentAnd<data::Builder<'_>> {
        let (ptr, reff, segment_id) = allocate(
            arena,
            reff,
            segment_id,
            round_bytes_up_to_words(size),
            WirePointerKind::List,
        );

        (*reff).set_list_size_and_count(Byte, size);

        SegmentAnd {
            segment_id,
            value: data::builder_from_raw_parts(ptr, size),
        }
    }

    #[inline]
    pub unsafe fn set_data_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        reff: *mut WirePointer,
        segment_id: u32,
        value: &[u8],
    ) -> SegmentAnd<data::Builder<'a>> {
        let allocation = init_data_pointer(arena, reff, segment_id, value.len() as u32);
        copy_nonoverlapping_check_zero(value.as_ptr(), allocation.value.as_mut_ptr(), value.len());
        allocation
    }

    #[inline]
    pub unsafe fn get_writable_data_pointer<'a>(
        arena: &'a mut dyn BuilderArena,
        mut reff: *mut WirePointer,
        mut segment_id: u32,
        default: Option<&'a [crate::Word]>,
    ) -> Result<data::Builder<'a>> {
        let ref_target = if (*reff).is_null() {
            match default {
                None => return Ok(&mut []),
                Some(d) => {
                    let (new_ref_target, new_reff, new_segment_id) = copy_message(
                        arena,
                        segment_id,
                        Default::default(),
                        reff,
                        d.as_ptr() as *const _,
                    );
                    reff = new_reff;
                    segment_id = new_segment_id;
                    new_ref_target
                }
            }
        } else {
            WirePointer::mut_target(reff)
        };

        let (ptr, reff, _segment_id) = follow_builder_fars(arena, reff, ref_target, segment_id)?;

        if (*reff).kind() != WirePointerKind::List {
            return Err(Error::from_kind(ErrorKind::ExistingPointerIsNotAList));
        }
        if (*reff).list_element_size() != Byte {
            return Err(Error::from_kind(
                ErrorKind::ExistingListPointerIsNotByteSized,
            ));
        }

        Ok(data::builder_from_raw_parts(
            ptr,
            (*reff).list_element_count(),
        ))
    }

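    /// Copies the struct read by `value` into `reff`. When `canonicalize` is
    /// set, trailing zero data words and trailing null pointers are dropped so
    /// that equal structs always serialize to identical bytes.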
    pub unsafe fn set_struct_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: StructReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let mut data_size: ByteCount32 = round_bits_up_to_bytes(u64::from(value.data_size));
        let mut ptr_count = value.pointer_count;

        if canonicalize {
            if !(value.data_size == 1 || value.data_size % BITS_PER_BYTE as u32 == 0) {
                return Err(Error::from_kind(
                    ErrorKind::StructReaderHadBitwidthOtherThan1,
                ));
            }

            if value.data_size == 1 {
                if !value.get_bool_field(0) {
                    data_size = 0;
                }
            } else {
                'chop: while data_size != 0 {
                    let end = data_size;
                    let mut window = data_size % BYTES_PER_WORD as u32;
                    if window == 0 {
                        window = BYTES_PER_WORD as u32;
                    }
                    let start = end - window;
                    let last_word = &value.get_data_section_as_blob()[start as usize..end as usize];
                    if last_word == [0; 8] {
                        data_size -= window;
                    } else {
                        break 'chop;
                    }
                }
            }

            while ptr_count != 0 && value.get_pointer_field(ptr_count as usize - 1).is_null() {
                ptr_count -= 1;
            }
        }

        let data_words = round_bytes_up_to_words(data_size);
        let total_size: WordCount32 = data_words + u32::from(ptr_count) * WORDS_PER_POINTER as u32;

        let (ptr, reff, segment_id) =
            allocate(arena, reff, segment_id, total_size, WirePointerKind::Struct);
        (*reff).set_struct_size_from_pieces(data_words as u16, ptr_count);

        if value.data_size == 1 {
            if data_size != 0 {
                *ptr = u8::from(value.get_bool_field(0))
            }
        } else {
            copy_nonoverlapping_check_zero::<u8>(value.data, ptr, data_size as usize);
        }

        let pointer_section: *mut WirePointer =
            ptr.offset(data_words as isize * BYTES_PER_WORD as isize) as *mut _;
        for i in 0..ptr_count as isize {
            copy_pointer(
                arena,
                segment_id,
                cap_table,
                pointer_section.offset(i),
                value.arena,
                value.segment_id,
                value.cap_table,
                value.pointers.offset(i),
                value.nesting_limit,
                canonicalize,
            )?;
        }

        Ok(SegmentAnd {
            segment_id,
            value: ptr,
        })
    }

    #[cfg(feature = "alloc")]
    pub fn set_capability_pointer(
        _arena: &mut dyn BuilderArena,
        _segment_id: u32,
        mut cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        cap: alloc::boxed::Box<dyn ClientHook>,
    ) {
        unsafe {
            (*reff).set_cap(cap_table.inject_cap(cap) as u32);
        }
    }

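    /// Copies the list read by `value` into `reff`. When `canonicalize` is
    /// set, inline-composite elements are truncated to the smallest data and
    /// pointer sections that still hold every element's content.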
    pub unsafe fn set_list_pointer(
        arena: &mut dyn BuilderArena,
        segment_id: u32,
        cap_table: CapTableBuilder,
        reff: *mut WirePointer,
        value: ListReader,
        canonicalize: bool,
    ) -> Result<SegmentAnd<*mut u8>> {
        let total_size =
            round_bits_up_to_words(u64::from(value.element_count) * u64::from(value.step));

        if value.element_size != ElementSize::InlineComposite {
            let (ptr, reff, segment_id) =
                allocate(arena, reff, segment_id, total_size, WirePointerKind::List);

            if value.struct_pointer_count == 1 {
                (*reff).set_list_size_and_count(Pointer, value.element_count);
                for i in 0..value.element_count as isize {
                    copy_pointer(
                        arena,
                        segment_id,
                        cap_table,
                        (ptr as *mut WirePointer).offset(i),
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        (value.ptr as *const WirePointer).offset(i),
                        value.nesting_limit,
                        canonicalize,
                    )?;
                }
            } else {
                let element_size = match value.step {
                    0 => Void,
                    1 => Bit,
                    8 => Byte,
                    16 => TwoBytes,
                    32 => FourBytes,
                    64 => EightBytes,
                    _ => {
                        panic!("invalid list step size: {}", value.step)
                    }
                };

                (*reff).set_list_size_and_count(element_size, value.element_count);

                // Copy the whole bytes, then mask off any unused trailing bits
                // of a final partial byte so they read as zero.
                let whole_byte_size =
                    u64::from(value.element_count) * u64::from(value.step) / BITS_PER_BYTE as u64;
                copy_nonoverlapping_check_zero(value.ptr, ptr, whole_byte_size as usize);

                let leftover_bits =
                    u64::from(value.element_count) * u64::from(value.step) % BITS_PER_BYTE as u64;
                if leftover_bits > 0 {
                    let mask: u8 = (1 << leftover_bits as u8) - 1;

                    *ptr.offset(whole_byte_size as isize) =
                        mask & (*value.ptr.offset(whole_byte_size as isize))
                }
            }

            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        } else {
            let decl_data_size = value.struct_data_size / BITS_PER_WORD as u32;
            let decl_pointer_count = value.struct_pointer_count;

            let mut data_size = 0;
            let mut ptr_count = 0;
            let mut total_size = total_size;

            if canonicalize {
                for ec in 0..value.element_count {
                    let se = value.get_struct_element(ec);
                    let mut local_data_size = decl_data_size;
                    'data_chop: while local_data_size != 0 {
                        let end = local_data_size * BYTES_PER_WORD as u32;
                        let window = BYTES_PER_WORD as u32;
                        let start = end - window;
                        let last_word =
                            &se.get_data_section_as_blob()[start as usize..end as usize];
                        if last_word != [0; 8] {
                            break 'data_chop;
                        } else {
                            local_data_size -= 1;
                        }
                    }
                    if local_data_size > data_size {
                        data_size = local_data_size;
                    }
                    let mut local_ptr_count = decl_pointer_count;
                    while local_ptr_count != 0
                        && se.get_pointer_field(local_ptr_count as usize - 1).is_null()
                    {
                        local_ptr_count -= 1;
                    }
                    if local_ptr_count > ptr_count {
                        ptr_count = local_ptr_count;
                    }
                }
                total_size = (data_size + u32::from(ptr_count)) * value.element_count;
            } else {
                data_size = decl_data_size;
                ptr_count = decl_pointer_count;
            }

            let (ptr, reff, segment_id) = allocate(
                arena,
                reff,
                segment_id,
                total_size + POINTER_SIZE_IN_WORDS as u32,
                WirePointerKind::List,
            );
            (*reff).set_list_inline_composite(total_size);

            let tag: *mut WirePointer = ptr as *mut _;
            (*tag).set_kind_and_inline_composite_list_element_count(
                WirePointerKind::Struct,
                value.element_count,
            );
            (*tag).set_struct_size_from_pieces(data_size as u16, ptr_count);
            let mut dst = ptr.add(BYTES_PER_WORD);

            let mut src: *const u8 = value.ptr;
            for _ in 0..value.element_count {
                copy_nonoverlapping_check_zero(src, dst, data_size as usize * BYTES_PER_WORD);
                dst = dst.offset(data_size as isize * BYTES_PER_WORD as isize);
                src = src.offset(decl_data_size as isize * BYTES_PER_WORD as isize);

                for _ in 0..ptr_count {
                    copy_pointer(
                        arena,
                        segment_id,
                        cap_table,
                        dst as *mut _,
                        value.arena,
                        value.segment_id,
                        value.cap_table,
                        src as *const WirePointer,
                        value.nesting_limit,
                        canonicalize,
                    )?;
                    dst = dst.add(BYTES_PER_WORD);
                    src = src.add(BYTES_PER_WORD);
                }

                src =
                    src.offset((decl_pointer_count - ptr_count) as isize * BYTES_PER_WORD as isize);
            }
            Ok(SegmentAnd {
                segment_id,
                value: ptr,
            })
        }
    }

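    /// Deep-copies the object at `src` in a reader message into `dst` in a
    /// builder message, bounds-checking the source and enforcing the nesting
    /// limit along the way.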
2153 #[allow(clippy::too_many_arguments)]
2154 pub unsafe fn copy_pointer(
2155 dst_arena: &mut dyn BuilderArena,
2156 dst_segment_id: u32,
2157 dst_cap_table: CapTableBuilder,
2158 dst: *mut WirePointer,
2159 src_arena: &dyn ReaderArena,
2160 src_segment_id: u32,
2161 src_cap_table: CapTableReader,
2162 src: *const WirePointer,
2163 nesting_limit: i32,
2164 canonicalize: bool,
2165 ) -> Result<SegmentAnd<*mut u8>> {
2166 if (*src).is_null() {
2167 ptr::write_bytes(dst, 0, 1);
2168 return Ok(SegmentAnd {
2169 segment_id: dst_segment_id,
2170 value: ptr::null_mut(),
2171 });
2172 }
2173
2174 let (mut ptr, src, src_segment_id) = follow_fars(src_arena, src, src_segment_id)?;
2175
2176 match (*src).kind() {
2177 WirePointerKind::Struct => {
2178 if nesting_limit <= 0 {
2179 return Err(Error::from_kind(
2180 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2181 ));
2182 }
2183
2184 bounds_check(
2185 src_arena,
2186 src_segment_id,
2187 ptr,
2188 (*src).struct_word_size() as usize,
2189 WirePointerKind::Struct,
2190 )?;
2191
2192 set_struct_pointer(
2193 dst_arena,
2194 dst_segment_id,
2195 dst_cap_table,
2196 dst,
2197 StructReader {
2198 arena: src_arena,
2199 segment_id: src_segment_id,
2200 cap_table: src_cap_table,
2201 data: ptr,
2202 pointers: ptr
2203 .offset((*src).struct_data_size() as isize * BYTES_PER_WORD as isize)
2204 as *const _,
2205 data_size: u32::from((*src).struct_data_size()) * BITS_PER_WORD as u32,
2206 pointer_count: (*src).struct_ptr_count(),
2207 nesting_limit: nesting_limit - 1,
2208 },
2209 canonicalize,
2210 )
2211 }
2212 WirePointerKind::List => {
2213 let element_size = (*src).list_element_size();
2214 if nesting_limit <= 0 {
2215 return Err(Error::from_kind(
2216 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2217 ));
2218 }
2219
2220 if element_size == InlineComposite {
2221 let word_count = (*src).list_inline_composite_word_count();
2222 let tag: *const WirePointer = ptr as *const _;
2223 ptr = ptr.add(BYTES_PER_WORD);
2224
2225 bounds_check(
2226 src_arena,
2227 src_segment_id,
2228 ptr.offset(-(BYTES_PER_WORD as isize)),
2229 word_count as usize + 1,
2230 WirePointerKind::List,
2231 )?;
2232
2233 if (*tag).kind() != WirePointerKind::Struct {
2234 return Err(Error::from_kind(
2235 ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
2236 ));
2237 }
2238
2239 let element_count = (*tag).inline_composite_list_element_count();
2240 let words_per_element = (*tag).struct_word_size();
2241
2242 if u64::from(words_per_element) * u64::from(element_count)
2243 > u64::from(word_count)
2244 {
2245 return Err(Error::from_kind(
2246 ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
2247 ));
2248 }
2249
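                    // Zero-sized elements can claim an arbitrarily large count
                    // while occupying no wire space, so explicitly charge the
                    // read limit one word per element to defend against
                    // amplification attacks.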
2250 if words_per_element == 0 {
2251 amplified_read(src_arena, u64::from(element_count))?;
2254 }
2255
2256 set_list_pointer(
2257 dst_arena,
2258 dst_segment_id,
2259 dst_cap_table,
2260 dst,
2261 ListReader {
2262 arena: src_arena,
2263 segment_id: src_segment_id,
2264 cap_table: src_cap_table,
2265 ptr: ptr as *const _,
2266 element_count,
2267 element_size,
2268 step: words_per_element * BITS_PER_WORD as u32,
2269 struct_data_size: u32::from((*tag).struct_data_size())
2270 * BITS_PER_WORD as u32,
2271 struct_pointer_count: (*tag).struct_ptr_count(),
2272 nesting_limit: nesting_limit - 1,
2273 },
2274 canonicalize,
2275 )
2276 } else {
2277 let data_size = data_bits_per_element(element_size);
2278 let pointer_count = pointers_per_element(element_size);
2279 let step = data_size + pointer_count * BITS_PER_POINTER as u32;
2280 let element_count = (*src).list_element_count();
2281 let word_count =
2282 round_bits_up_to_words(u64::from(element_count) * u64::from(step));
2283
2284 bounds_check(
2285 src_arena,
2286 src_segment_id,
2287 ptr,
2288 word_count as usize,
2289 WirePointerKind::List,
2290 )?;
2291
2292 if element_size == Void {
2293 amplified_read(src_arena, u64::from(element_count))?;
2296 }
2297
2298 set_list_pointer(
2299 dst_arena,
2300 dst_segment_id,
2301 dst_cap_table,
2302 dst,
2303 ListReader {
2304 arena: src_arena,
2305 segment_id: src_segment_id,
2306 cap_table: src_cap_table,
2307 ptr: ptr as *const _,
2308 element_count,
2309 element_size,
2310 step,
2311 struct_data_size: data_size,
2312 struct_pointer_count: pointer_count as u16,
2313 nesting_limit: nesting_limit - 1,
2314 },
2315 canonicalize,
2316 )
2317 }
2318 }
2319 WirePointerKind::Far => Err(Error::from_kind(ErrorKind::MalformedDoubleFarPointer)),
2320 WirePointerKind::Other => {
2321 if !(*src).is_capability() {
2322 return Err(Error::from_kind(ErrorKind::UnknownPointerType));
2323 }
2324 if canonicalize {
2325 return Err(Error::from_kind(
2326 ErrorKind::CannotCreateACanonicalMessageWithACapability,
2327 ));
2328 }
2329 #[cfg(feature = "alloc")]
2330 match src_cap_table.extract_cap((*src).cap_index() as usize) {
2331 Some(cap) => {
2332 set_capability_pointer(dst_arena, dst_segment_id, dst_cap_table, dst, cap);
2333 Ok(SegmentAnd {
2334 segment_id: dst_segment_id,
2335 value: ptr::null_mut(),
2336 })
2337 }
2338 None => Err(Error::from_kind(
2339 ErrorKind::MessageContainsInvalidCapabilityPointer,
2340 )),
2341 }
2342 #[cfg(not(feature = "alloc"))]
2343 return Err(Error::from_kind(ErrorKind::UnknownPointerType));
2344 }
2345 }
2346 }
2347
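    /// Reads the struct pointer at `reff`, substituting `default` if the
    /// pointer is null. Follows far pointers, verifies that the target really
    /// is a struct, and bounds-checks its body before constructing a
    /// `StructReader`.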
2348 #[inline]
2349 pub unsafe fn read_struct_pointer<'a>(
2350 mut arena: &'a dyn ReaderArena,
2351 mut segment_id: u32,
2352 cap_table: CapTableReader,
2353 mut reff: *const WirePointer,
2354 default: Option<&'a [crate::Word]>,
2355 nesting_limit: i32,
2356 ) -> Result<StructReader<'a>> {
2357 if (*reff).is_null() {
2358 match default {
2359 None => return Ok(StructReader::new_default()),
2360 Some(d) if (*(d.as_ptr() as *const WirePointer)).is_null() => {
2361 return Ok(StructReader::new_default())
2362 }
2363 Some(d) => {
2364 reff = d.as_ptr() as *const _;
2365 arena = &super::NULL_ARENA;
2366 segment_id = 0;
2367 }
2368 }
2369 }
2370
2371 if nesting_limit <= 0 {
2372 return Err(Error::from_kind(
2373 ErrorKind::MessageIsTooDeeplyNestedOrContainsCycles,
2374 ));
2375 }
2376
2377 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2378
2379 let data_size_words = (*reff).struct_data_size();
2380
2381 if (*reff).kind() != WirePointerKind::Struct {
2382 return Err(Error::from_kind(
2383 ErrorKind::MessageContainsNonStructPointerWhereStructPointerWasExpected,
2384 ));
2385 }
2386
2387 bounds_check(
2388 arena,
2389 segment_id,
2390 ptr,
2391 (*reff).struct_word_size() as usize,
2392 WirePointerKind::Struct,
2393 )?;
2394
2395 Ok(StructReader {
2396 arena,
2397 segment_id,
2398 cap_table,
2399 data: ptr,
2400 pointers: ptr.offset(data_size_words as isize * BYTES_PER_WORD as isize) as *const _,
2401 data_size: u32::from(data_size_words) * BITS_PER_WORD as BitCount32,
2402 pointer_count: (*reff).struct_ptr_count(),
2403 nesting_limit: nesting_limit - 1,
2404 })
2405 }
2406
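    /// Resolves the capability pointer at `reff` to an entry in `cap_table`,
    /// returning an owned `ClientHook`. Null pointers, non-capability
    /// pointers, and indexes outside the table are all errors.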
2407 #[inline]
2408 #[cfg(feature = "alloc")]
2409 pub unsafe fn read_capability_pointer(
2410 _arena: &dyn ReaderArena,
2411 _segment_id: u32,
2412 cap_table: CapTableReader,
2413 reff: *const WirePointer,
2414 _nesting_limit: i32,
2415 ) -> Result<alloc::boxed::Box<dyn ClientHook>> {
2416 if (*reff).is_null() {
2417 Err(Error::from_kind(
2418 ErrorKind::MessageContainsNullCapabilityPointer,
2419 ))
2420 } else if !(*reff).is_capability() {
2421 Err(Error::from_kind(
2422 ErrorKind::MessageContainsNonCapabilityPointerWhereCapabilityPointerWasExpected,
2423 ))
2424 } else {
2425 let n = (*reff).cap_index() as usize;
2426 match cap_table.extract_cap(n) {
2427 Some(client_hook) => Ok(client_hook),
2428 None => Err(Error::from_kind(
2429 ErrorKind::MessageContainsInvalidCapabilityPointer,
2430 )),
2431 }
2432 }
2433 }
2434
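    /// Reads the list pointer at `reff`, following far pointers and
    /// bounds-checking the list body. When `expected_element_size` is given,
    /// it is checked against the encoding found on the wire; an
    /// inline-composite (struct) list may stand in for a primitive or pointer
    /// list as long as each element carries enough data bits or pointers,
    /// which is what makes upgraded list schemas readable.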
2435 #[inline]
2436 pub unsafe fn read_list_pointer(
2437 mut arena: &dyn ReaderArena,
2438 mut segment_id: u32,
2439 cap_table: CapTableReader,
2440 mut reff: *const WirePointer,
2441 default_value: *const u8,
2442 expected_element_size: Option<ElementSize>,
2443 nesting_limit: i32,
2444 ) -> Result<ListReader<'_>> {
2445 if (*reff).is_null() {
2446 if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
2447 return Ok(ListReader::new_default());
2448 }
2449 reff = default_value as *const _;
2450 arena = &super::NULL_ARENA;
2451 segment_id = 0;
2452 }
2453
2454 if nesting_limit <= 0 {
2455 return Err(Error::from_kind(ErrorKind::NestingLimitExceeded));
2456 }
2457 let (mut ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2458
2459 if (*reff).kind() != WirePointerKind::List {
2460 return Err(Error::from_kind(
2461 ErrorKind::MessageContainsNonListPointerWhereListPointerWasExpected,
2462 ));
2463 }
2464
2465 let element_size = (*reff).list_element_size();
2466 match element_size {
2467 InlineComposite => {
2468 let word_count = (*reff).list_inline_composite_word_count();
2469
2470 let tag: *const WirePointer = ptr as *const WirePointer;
2471
2472 ptr = ptr.add(BYTES_PER_WORD);
2473
2474 bounds_check(
2475 arena,
2476 segment_id,
2477 ptr.offset(-(BYTES_PER_WORD as isize)),
2478 word_count as usize + 1,
2479 WirePointerKind::List,
2480 )?;
2481
2482 if (*tag).kind() != WirePointerKind::Struct {
2483 return Err(Error::from_kind(
2484 ErrorKind::InlineCompositeListsOfNonStructTypeAreNotSupported,
2485 ));
2486 }
2487
2488 let size = (*tag).inline_composite_list_element_count();
2489 let data_size = (*tag).struct_data_size();
2490 let ptr_count = (*tag).struct_ptr_count();
2491 let words_per_element = (*tag).struct_word_size();
2492
2493 if u64::from(size) * u64::from(words_per_element) > u64::from(word_count) {
2494 return Err(Error::from_kind(
2495 ErrorKind::InlineCompositeListsElementsOverrunItsWordCount,
2496 ));
2497 }
2498
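                // Same amplification-attack guard as in `copy_pointer`:
                // zero-sized elements are charged against the read limit
                // explicitly.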
2499 if words_per_element == 0 {
2500 amplified_read(arena, u64::from(size))?;
2503 }
2504
2505 match expected_element_size {
2512 None | Some(Void | InlineComposite) => (),
2513 Some(Bit) => {
2514 return Err(Error::from_kind(
2515 ErrorKind::FoundStructListWhereBitListWasExpected,
2516 ));
2517 }
2518 Some(Byte | TwoBytes | FourBytes | EightBytes) => {
2519 if data_size == 0 {
2520 return Err(Error::from_kind(
2521 ErrorKind::ExpectedAPrimitiveListButGotAListOfPointerOnlyStructs,
2522 ));
2523 }
2524 }
2525 Some(Pointer) => {
2526 if ptr_count == 0 {
2527 return Err(Error::from_kind(
2528 ErrorKind::ExpectedAPointerListButGotAListOfDataOnlyStructs,
2529 ));
2530 }
2531 }
2532 }
2533
2534 Ok(ListReader {
2535 arena,
2536 segment_id,
2537 cap_table,
2538 ptr: ptr as *const _,
2539 element_count: size,
2540 element_size,
2541 step: words_per_element * BITS_PER_WORD as u32,
2542 struct_data_size: u32::from(data_size) * (BITS_PER_WORD as u32),
2543 struct_pointer_count: ptr_count,
2544 nesting_limit: nesting_limit - 1,
2545 })
2546 }
2547 _ => {
2548 let data_size = data_bits_per_element((*reff).list_element_size());
2552 let pointer_count = pointers_per_element((*reff).list_element_size());
2553 let element_count = (*reff).list_element_count();
2554 let step = data_size + pointer_count * BITS_PER_POINTER as u32;
2555
2556 let word_count = round_bits_up_to_words(u64::from(element_count) * u64::from(step));
2557 bounds_check(
2558 arena,
2559 segment_id,
2560 ptr,
2561 word_count as usize,
2562 WirePointerKind::List,
2563 )?;
2564
2565 if element_size == Void {
2566 amplified_read(arena, u64::from(element_count))?;
2569 }
2570
2571 if let Some(expected_element_size) = expected_element_size {
2572 if element_size == ElementSize::Bit && expected_element_size != ElementSize::Bit
2573 {
2574 return Err(Error::from_kind(
2575 ErrorKind::FoundBitListWhereStructListWasExpected,
2576 ));
2577 }
2578
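                // Verify that the elements are at least as large as the
                // expected type. (If InlineComposite was expected, both
                // expected sizes are zero and bounds checking happens at
                // field-access time instead.)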
2579 let expected_data_bits_per_element =
2585 data_bits_per_element(expected_element_size);
2586 let expected_pointers_per_element = pointers_per_element(expected_element_size);
2587
2588 if expected_data_bits_per_element > data_size
2589 || expected_pointers_per_element > pointer_count
2590 {
2591 return Err(Error::from_kind(
2592 ErrorKind::MessageContainsListWithIncompatibleElementType,
2593 ));
2594 }
2595 }
2596
2597 Ok(ListReader {
2598 arena,
2599 segment_id,
2600 cap_table,
2601 ptr: ptr as *const _,
2602 element_count,
2603 element_size,
2604 step,
2605 struct_data_size: data_size,
2606 struct_pointer_count: pointer_count as u16,
2607 nesting_limit: nesting_limit - 1,
2608 })
2609 }
2610 }
2611 }
2612
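    /// Reads the text pointer at `reff`. On the wire, text is a `Byte` list
    /// whose final byte must be a NUL terminator; the returned reader covers
    /// the bytes before that terminator.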
2613 #[inline]
2614 pub unsafe fn read_text_pointer<'a>(
2615 mut arena: &'a dyn ReaderArena,
2616 mut segment_id: u32,
2617 mut reff: *const WirePointer,
2618 default: Option<&[crate::Word]>,
2619 ) -> Result<text::Reader<'a>> {
2620 if (*reff).is_null() {
2621 match default {
2622 None => return Ok("".into()),
2623 Some(d) => {
2624 reff = d.as_ptr() as *const WirePointer;
2625 arena = &super::NULL_ARENA;
2626 segment_id = 0;
2627 }
2628 }
2629 }
2630
2631 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2632 let size = (*reff).list_element_count();
2633
2634 if (*reff).kind() != WirePointerKind::List {
2635 return Err(Error::from_kind(
2636 ErrorKind::MessageContainsNonListPointerWhereTextWasExpected,
2637 ));
2638 }
2639
2640 if (*reff).list_element_size() != Byte {
2641 return Err(Error::from_kind(
2642 ErrorKind::MessageContainsListPointerOfNonBytesWhereTextWasExpected,
2643 ));
2644 }
2645
2646 bounds_check(
2647 arena,
2648 segment_id,
2649 ptr,
2650 round_bytes_up_to_words(size) as usize,
2651 WirePointerKind::List,
2652 )?;
2653
2654 if size == 0 {
2655 return Err(Error::from_kind(
2656 ErrorKind::MessageContainsTextThatIsNotNULTerminated,
2657 ));
2658 }
2659
2660 let str_ptr = ptr;
2661
2662 if (*str_ptr.offset((size - 1) as isize)) != 0u8 {
2663 return Err(Error::from_kind(
2664 ErrorKind::MessageContainsTextThatIsNotNULTerminated,
2665 ));
2666 }
2667
2668 Ok(text::Reader(slice::from_raw_parts(
2669 str_ptr,
2670 size as usize - 1,
2671 )))
2672 }
2673
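    /// Reads the data pointer at `reff`: a `Byte` list with no terminator
    /// requirement, returned as a raw byte slice.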
2674 #[inline]
2675 pub unsafe fn read_data_pointer<'a>(
2676 mut arena: &'a dyn ReaderArena,
2677 mut segment_id: u32,
2678 mut reff: *const WirePointer,
2679 default: Option<&'a [crate::Word]>,
2680 ) -> Result<data::Reader<'a>> {
2681 if (*reff).is_null() {
2682 match default {
2683 None => return Ok(&[]),
2684 Some(d) => {
2685 reff = d.as_ptr() as *const WirePointer;
2686 arena = &super::NULL_ARENA;
2687 segment_id = 0;
2688 }
2689 }
2690 }
2691
2692 let (ptr, reff, segment_id) = follow_fars(arena, reff, segment_id)?;
2693
2694 let size: u32 = (*reff).list_element_count();
2695
2696 if (*reff).kind() != WirePointerKind::List {
2697 return Err(Error::from_kind(
2698 ErrorKind::MessageContainsNonListPointerWhereDataWasExpected,
2699 ));
2700 }
2701
2702 if (*reff).list_element_size() != Byte {
2703 return Err(Error::from_kind(
2704 ErrorKind::MessageContainsListPointerOfNonBytesWhereDataWasExpected,
2705 ));
2706 }
2707
2708 bounds_check(
2709 arena,
2710 segment_id,
2711 ptr,
2712 round_bytes_up_to_words(size) as usize,
2713 WirePointerKind::List,
2714 )?;
2715
2716 Ok(data::reader_from_raw_parts(ptr as *const _, size))
2717 }
2718}
2719
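// A single zeroed word. `zero_pointer()` exposes it as a null `WirePointer`
// so that readers can treat absent pointer fields uniformly as null.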
2720static ZERO: u64 = 0;
2721fn zero_pointer() -> *const WirePointer {
2722 &ZERO as *const _ as *const _
2723}
2724
2725static NULL_ARENA: NullArena = NullArena;
2726
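/// The capability table of a message: capability pointers on the wire are
/// indexes into this table of `ClientHook`s. An entry becomes `None` once the
/// corresponding capability has been dropped.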
2727#[cfg(feature = "alloc")]
2728pub type CapTable = alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>>;
2729
2730#[cfg(not(feature = "alloc"))]
2731pub struct CapTable;
2732
2733#[derive(Copy, Clone)]
2734pub enum CapTableReader {
2735 Plain(*const CapTable),
2739}
2740
2741impl Default for CapTableReader {
2742 fn default() -> Self {
2743 CapTableReader::Plain(ptr::null())
2744 }
2745}
2746
2747#[cfg(feature = "alloc")]
2748impl CapTableReader {
2749 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2750 match *self {
2751 Self::Plain(hooks) => {
2752 if hooks.is_null() {
2753 return None;
2754 }
2755 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2756 unsafe { &*hooks };
2757 if index >= hooks.len() {
2758 None
2759 } else {
2760 hooks[index].as_ref().map(|hook| hook.add_ref())
2761 }
2762 }
2763 }
2764 }
2765}
2766
2767#[derive(Copy, Clone)]
2768pub enum CapTableBuilder {
2769 Plain(*mut CapTable),
2773}
2774
2775impl Default for CapTableBuilder {
2776 fn default() -> Self {
2777 CapTableBuilder::Plain(ptr::null_mut())
2778 }
2779}
2780
2781impl CapTableBuilder {
2782 pub fn into_reader(self) -> CapTableReader {
2783 match self {
2784 Self::Plain(hooks) => CapTableReader::Plain(hooks),
2785 }
2786 }
2787
2788 #[cfg(feature = "alloc")]
2789 pub fn extract_cap(&self, index: usize) -> Option<alloc::boxed::Box<dyn ClientHook>> {
2790 match *self {
2791 Self::Plain(hooks) => {
2792 if hooks.is_null() {
2793 return None;
2794 }
2795 let hooks: &alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2796 unsafe { &*hooks };
2797 if index >= hooks.len() {
2798 None
2799 } else {
2800 hooks[index].as_ref().map(|hook| hook.add_ref())
2801 }
2802 }
2803 }
2804 }
2805
2806 #[cfg(feature = "alloc")]
2807 pub fn inject_cap(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) -> usize {
2808 match *self {
2809 Self::Plain(hooks) => {
2810 if hooks.is_null() {
2811 panic!(
2812 "Called inject_cap() on a null capability table. You need \
2813 to call imbue_mut() on this message before adding capabilities."
2814 );
2815 }
2816 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2817 unsafe { &mut *hooks };
2818 hooks.push(Some(cap));
2819 hooks.len() - 1
2820 }
2821 }
2822 }
2823
2824 #[cfg(feature = "alloc")]
2825 pub fn drop_cap(&mut self, index: usize) {
2826 match *self {
2827 Self::Plain(hooks) => {
2828 if hooks.is_null() {
2829 panic!(
2830 "Called drop_cap() on a null capability table. You need \
2831 to call imbue_mut() on this message before adding capabilities."
2832 );
2833 }
2834 let hooks: &mut alloc::vec::Vec<Option<alloc::boxed::Box<dyn ClientHook>>> =
2835 unsafe { &mut *hooks };
2836 if index < hooks.len() {
2837 hooks[index] = None;
2838 }
2839 }
2840 }
2841 }
2842}
2843
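/// A reference to a single pointer field in a message, bundled with everything
/// needed to follow it safely: the arena to read from, the capability table,
/// and the remaining `nesting_limit` traversal budget.
///
/// A minimal sketch of the read path (illustrative only; this module is
/// crate-private, so it is not a runnable doctest):
///
/// ```ignore
/// let root = PointerReader::get_root(arena, 0, location, 64)?;
/// let st = root.get_struct(None)?;
/// let first_field: u64 = st.get_data_field(0);
/// ```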
2844#[derive(Clone, Copy)]
2845pub struct PointerReader<'a> {
2846 arena: &'a dyn ReaderArena,
2847 cap_table: CapTableReader,
2848 pointer: *const WirePointer,
2849 segment_id: u32,
2850 nesting_limit: i32,
2851}
2852
2853impl<'a> PointerReader<'a> {
2854 pub fn new_default<'b>() -> PointerReader<'b> {
2855 PointerReader {
2856 arena: &NULL_ARENA,
2857 segment_id: 0,
2858 cap_table: Default::default(),
2859 pointer: ptr::null(),
2860 nesting_limit: 0x7fffffff,
2861 }
2862 }
2863
2864 pub fn get_root(
2865 arena: &'a dyn ReaderArena,
2866 segment_id: u32,
2867 location: *const u8,
2868 nesting_limit: i32,
2869 ) -> Result<Self> {
2870 wire_helpers::bounds_check(
2871 arena,
2872 segment_id,
2873 location as *const _,
2874 POINTER_SIZE_IN_WORDS,
2875 WirePointerKind::Struct,
2876 )?;
2877
2878 Ok(PointerReader {
2879 arena,
2880 segment_id,
2881 cap_table: Default::default(),
2882 pointer: location as *const _,
2883 nesting_limit,
2884 })
2885 }
2886
2887 pub fn reborrow(&self) -> PointerReader<'_> {
2888 PointerReader {
2889 arena: self.arena,
2890 ..*self
2891 }
2892 }
2893
2894 pub unsafe fn get_root_unchecked<'b>(location: *const u8) -> PointerReader<'b> {
2895 PointerReader {
2896 arena: &NULL_ARENA,
2897 segment_id: 0,
2898 cap_table: Default::default(),
2899 pointer: location as *const _,
2900 nesting_limit: 0x7fffffff,
2901 }
2902 }
2903
2904 pub fn imbue(&mut self, cap_table: CapTableReader) {
2905 self.cap_table = cap_table;
2906 }
2907
2908 #[inline]
2909 pub fn is_null(&self) -> bool {
2910 self.pointer.is_null() || unsafe { (*self.pointer).is_null() }
2911 }
2912
2913 pub fn total_size(&self) -> Result<MessageSize> {
2914 if self.pointer.is_null() {
2915 Ok(MessageSize {
2916 word_count: 0,
2917 cap_count: 0,
2918 })
2919 } else {
2920 unsafe {
2921 wire_helpers::total_size(
2922 self.arena,
2923 self.segment_id,
2924 self.pointer,
2925 self.nesting_limit,
2926 )
2927 }
2928 }
2929 }
2930
2931 pub fn get_struct(self, default: Option<&'a [crate::Word]>) -> Result<StructReader<'a>> {
2932 let reff: *const WirePointer = if self.pointer.is_null() {
2933 zero_pointer()
2934 } else {
2935 self.pointer
2936 };
2937 unsafe {
2938 wire_helpers::read_struct_pointer(
2939 self.arena,
2940 self.segment_id,
2941 self.cap_table,
2942 reff,
2943 default,
2944 self.nesting_limit,
2945 )
2946 }
2947 }
2948
2949 pub fn get_list(
2950 self,
2951 expected_element_size: ElementSize,
2952 default: Option<&'a [crate::Word]>,
2953 ) -> Result<ListReader<'a>> {
2954 let default_value: *const u8 = match default {
2955 None => core::ptr::null(),
2956 Some(d) => d.as_ptr() as *const u8,
2957 };
2958 let reff = if self.pointer.is_null() {
2959 zero_pointer()
2960 } else {
2961 self.pointer
2962 };
2963 unsafe {
2964 wire_helpers::read_list_pointer(
2965 self.arena,
2966 self.segment_id,
2967 self.cap_table,
2968 reff,
2969 default_value,
2970 Some(expected_element_size),
2971 self.nesting_limit,
2972 )
2973 }
2974 }
2975
2976 fn get_list_any_size(self, default_value: *const u8) -> Result<ListReader<'a>> {
2977 let reff = if self.pointer.is_null() {
2978 zero_pointer()
2979 } else {
2980 self.pointer
2981 };
2982 unsafe {
2983 wire_helpers::read_list_pointer(
2984 self.arena,
2985 self.segment_id,
2986 self.cap_table,
2987 reff,
2988 default_value,
2989 None,
2990 self.nesting_limit,
2991 )
2992 }
2993 }
2994
2995 pub fn get_text(self, default: Option<&[crate::Word]>) -> Result<text::Reader<'a>> {
2996 let reff = if self.pointer.is_null() {
2997 zero_pointer()
2998 } else {
2999 self.pointer
3000 };
3001 unsafe { wire_helpers::read_text_pointer(self.arena, self.segment_id, reff, default) }
3002 }
3003
3004 pub fn get_data(&self, default: Option<&'a [crate::Word]>) -> Result<data::Reader<'a>> {
3005 let reff = if self.pointer.is_null() {
3006 zero_pointer()
3007 } else {
3008 self.pointer
3009 };
3010 unsafe { wire_helpers::read_data_pointer(self.arena, self.segment_id, reff, default) }
3011 }
3012
3013 #[cfg(feature = "alloc")]
3014 pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
3015 let reff: *const WirePointer = if self.pointer.is_null() {
3016 zero_pointer()
3017 } else {
3018 self.pointer
3019 };
3020 unsafe {
3021 wire_helpers::read_capability_pointer(
3022 self.arena,
3023 self.segment_id,
3024 self.cap_table,
3025 reff,
3026 self.nesting_limit,
3027 )
3028 }
3029 }
3030
3031 pub fn get_pointer_type(&self) -> Result<PointerType> {
3032 if self.is_null() {
3033 Ok(PointerType::Null)
3034 } else {
3035 let (_, reff, _) =
3036 unsafe { wire_helpers::follow_fars(self.arena, self.pointer, self.segment_id)? };
3037
3038 match unsafe { (*reff).kind() } {
3039 WirePointerKind::Far => Err(Error::from_kind(ErrorKind::UnexpectedFarPointer)),
3040 WirePointerKind::Struct => Ok(PointerType::Struct),
3041 WirePointerKind::List => Ok(PointerType::List),
3042 WirePointerKind::Other => {
3043 if unsafe { (*reff).is_capability() } {
3044 Ok(PointerType::Capability)
3045 } else {
3046 Err(Error::from_kind(ErrorKind::UnknownPointerType))
3047 }
3048 }
3049 }
3050 }
3051 }
3052
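    /// Returns whether the pointed-to value is encoded canonically: objects
    /// laid out in preorder starting at `read_head` (which is advanced along
    /// the way), sections truncated so that they end in a non-zero word, and
    /// no capabilities.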
3053 pub fn is_canonical(&self, read_head: &Cell<*const u8>) -> Result<bool> {
3054 if self.pointer.is_null() || unsafe { !(*self.pointer).is_positional() } {
3055 return Ok(false);
3056 }
3057
3058 match self.get_pointer_type()? {
3059 PointerType::Null => Ok(true),
3060 PointerType::Struct => {
3061 let mut data_trunc = false;
3062 let mut ptr_trunc = false;
3063 let st = self.get_struct(None)?;
3064 if st.get_data_section_size() == 0 && st.get_pointer_section_size() == 0 {
3065 Ok(self.pointer as *const _ == st.get_location())
3066 } else {
3067 let result =
3068 st.is_canonical(read_head, read_head, &mut data_trunc, &mut ptr_trunc)?;
3069 Ok(result && data_trunc && ptr_trunc)
3070 }
3071 }
3072 PointerType::List => unsafe {
3073 self.get_list_any_size(ptr::null())?
3074 .is_canonical(read_head, self.pointer)
3075 },
3076 PointerType::Capability => Ok(false),
3077 }
3078 }
3079}
3080
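/// The mutable counterpart of `PointerReader`: a single pointer field in a
/// message under construction, together with the builder arena and capability
/// table needed to allocate and write its target.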
3081pub struct PointerBuilder<'a> {
3082 arena: &'a mut dyn BuilderArena,
3083 segment_id: u32,
3084 cap_table: CapTableBuilder,
3085 pointer: *mut WirePointer,
3086}
3087
3088impl<'a> PointerBuilder<'a> {
3089 #[inline]
3090 pub fn get_root(arena: &'a mut dyn BuilderArena, segment_id: u32, location: *mut u8) -> Self {
3091 PointerBuilder {
3092 arena,
3093 cap_table: Default::default(),
3094 segment_id,
3095 pointer: location as *mut _,
3096 }
3097 }
3098
3099 #[inline]
3100 pub fn reborrow(&mut self) -> PointerBuilder<'_> {
3101 PointerBuilder {
3102 arena: self.arena,
3103 ..*self
3104 }
3105 }
3106
3107 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
3108 self.cap_table = cap_table;
3109 }
3110
3111 #[inline]
3112 pub fn is_null(&self) -> bool {
3113 unsafe { (*self.pointer).is_null() }
3114 }
3115
3116 pub fn get_struct(
3117 self,
3118 size: StructSize,
3119 default: Option<&'a [crate::Word]>,
3120 ) -> Result<StructBuilder<'a>> {
3121 unsafe {
3122 wire_helpers::get_writable_struct_pointer(
3123 self.arena,
3124 self.pointer,
3125 self.segment_id,
3126 self.cap_table,
3127 size,
3128 default,
3129 )
3130 }
3131 }
3132
3133 pub fn get_list(
3134 self,
3135 element_size: ElementSize,
3136 default: Option<&'a [crate::Word]>,
3137 ) -> Result<ListBuilder<'a>> {
3138 let default_value: *const u8 = match default {
3139 None => core::ptr::null(),
3140 Some(d) => d.as_ptr() as *const u8,
3141 };
3142 unsafe {
3143 wire_helpers::get_writable_list_pointer(
3144 self.arena,
3145 self.pointer,
3146 self.segment_id,
3147 self.cap_table,
3148 element_size,
3149 default_value,
3150 )
3151 }
3152 }
3153
3154 pub fn get_struct_list(
3155 self,
3156 element_size: StructSize,
3157 default: Option<&'a [crate::Word]>,
3158 ) -> Result<ListBuilder<'a>> {
3159 let default_value: *const u8 = match default {
3160 None => core::ptr::null(),
3161 Some(d) => d.as_ptr() as *const u8,
3162 };
3163 unsafe {
3164 wire_helpers::get_writable_struct_list_pointer(
3165 self.arena,
3166 self.pointer,
3167 self.segment_id,
3168 self.cap_table,
3169 element_size,
3170 default_value,
3171 )
3172 }
3173 }
3174
3175 pub fn get_text(self, default: Option<&'a [crate::Word]>) -> Result<text::Builder<'a>> {
3176 unsafe {
3177 wire_helpers::get_writable_text_pointer(
3178 self.arena,
3179 self.pointer,
3180 self.segment_id,
3181 default,
3182 )
3183 }
3184 }
3185
3186 pub fn get_data(self, default: Option<&'a [crate::Word]>) -> Result<data::Builder<'a>> {
3187 unsafe {
3188 wire_helpers::get_writable_data_pointer(
3189 self.arena,
3190 self.pointer,
3191 self.segment_id,
3192 default,
3193 )
3194 }
3195 }
3196
3197 #[cfg(feature = "alloc")]
3198 pub fn get_capability(&self) -> Result<alloc::boxed::Box<dyn ClientHook>> {
3199 unsafe {
3200 wire_helpers::read_capability_pointer(
3201 self.arena.as_reader(),
3202 self.segment_id,
3203 self.cap_table.into_reader(),
3204 self.pointer,
3205 i32::MAX,
3206 )
3207 }
3208 }
3209
3210 pub fn init_struct(self, size: StructSize) -> StructBuilder<'a> {
3211 unsafe {
3212 wire_helpers::init_struct_pointer(
3213 self.arena,
3214 self.pointer,
3215 self.segment_id,
3216 self.cap_table,
3217 size,
3218 )
3219 }
3220 }
3221
3222 pub fn init_list(
3223 self,
3224 element_size: ElementSize,
3225 element_count: ElementCount32,
3226 ) -> ListBuilder<'a> {
3227 unsafe {
3228 wire_helpers::init_list_pointer(
3229 self.arena,
3230 self.pointer,
3231 self.segment_id,
3232 self.cap_table,
3233 element_count,
3234 element_size,
3235 )
3236 }
3237 }
3238
3239 pub fn init_struct_list(
3240 self,
3241 element_count: ElementCount32,
3242 element_size: StructSize,
3243 ) -> ListBuilder<'a> {
3244 unsafe {
3245 wire_helpers::init_struct_list_pointer(
3246 self.arena,
3247 self.pointer,
3248 self.segment_id,
3249 self.cap_table,
3250 element_count,
3251 element_size,
3252 )
3253 }
3254 }
3255
3256 pub fn init_text(self, size: ByteCount32) -> text::Builder<'a> {
3257 unsafe {
3258 wire_helpers::init_text_pointer(self.arena, self.pointer, self.segment_id, size).value
3259 }
3260 }
3261
3262 pub fn init_data(self, size: ByteCount32) -> data::Builder<'a> {
3263 unsafe {
3264 wire_helpers::init_data_pointer(self.arena, self.pointer, self.segment_id, size).value
3265 }
3266 }
3267
3268 pub fn set_struct(&mut self, value: &StructReader, canonicalize: bool) -> Result<()> {
3269 unsafe {
3270 wire_helpers::set_struct_pointer(
3271 self.arena,
3272 self.segment_id,
3273 self.cap_table,
3274 self.pointer,
3275 *value,
3276 canonicalize,
3277 )?;
3278 Ok(())
3279 }
3280 }
3281
3282 pub fn set_list(&mut self, value: &ListReader, canonicalize: bool) -> Result<()> {
3283 unsafe {
3284 wire_helpers::set_list_pointer(
3285 self.arena,
3286 self.segment_id,
3287 self.cap_table,
3288 self.pointer,
3289 *value,
3290 canonicalize,
3291 )?;
3292 Ok(())
3293 }
3294 }
3295
3296 pub fn set_text(&mut self, value: crate::text::Reader<'_>) {
3297 unsafe {
3298 wire_helpers::set_text_pointer(self.arena, self.pointer, self.segment_id, value);
3299 }
3300 }
3301
3302 pub fn set_data(&mut self, value: &[u8]) {
3303 unsafe {
3304 wire_helpers::set_data_pointer(self.arena, self.pointer, self.segment_id, value);
3305 }
3306 }
3307
3308 #[cfg(feature = "alloc")]
3309 pub fn set_capability(&mut self, cap: alloc::boxed::Box<dyn ClientHook>) {
3310 wire_helpers::set_capability_pointer(
3311 self.arena,
3312 self.segment_id,
3313 self.cap_table,
3314 self.pointer,
3315 cap,
3316 );
3317 }
3318
3319 pub fn copy_from(&mut self, other: PointerReader, canonicalize: bool) -> Result<()> {
3320 if other.pointer.is_null() {
3321 if !self.pointer.is_null() {
3322 unsafe {
3323 wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
3324 *self.pointer = mem::zeroed();
3325 }
3326 }
3327 } else {
3328 unsafe {
3329 wire_helpers::copy_pointer(
3330 self.arena,
3331 self.segment_id,
3332 self.cap_table,
3333 self.pointer,
3334 other.arena,
3335 other.segment_id,
3336 other.cap_table,
3337 other.pointer,
3338 other.nesting_limit,
3339 canonicalize,
3340 )?;
3341 }
3342 }
3343 Ok(())
3344 }
3345
3346 pub fn clear(&mut self) {
3347 unsafe {
3348 wire_helpers::zero_object(self.arena, self.segment_id, self.pointer);
3349 ptr::write_bytes(self.pointer, 0, 1);
3350 }
3351 }
3352
3353 pub fn as_reader(&self) -> PointerReader<'_> {
3354 PointerReader {
3355 arena: self.arena.as_reader(),
3356 segment_id: self.segment_id,
3357 cap_table: self.cap_table.into_reader(),
3358 pointer: self.pointer,
3359 nesting_limit: 0x7fffffff,
3360 }
3361 }
3362
3363 pub fn into_reader(self) -> PointerReader<'a> {
3364 PointerReader {
3365 arena: self.arena.as_reader(),
3366 segment_id: self.segment_id,
3367 cap_table: self.cap_table.into_reader(),
3368 pointer: self.pointer,
3369 nesting_limit: 0x7fffffff,
3370 }
3371 }
3372}
3373
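/// A read-only view of a struct: a data section (`data_size` is measured in
/// bits) and a pointer section. Field accessors tolerate out-of-range offsets
/// so that messages written with an older schema still read correctly.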
3374#[derive(Clone, Copy)]
3375pub struct StructReader<'a> {
3376 arena: &'a dyn ReaderArena,
3377 cap_table: CapTableReader,
3378 data: *const u8,
3379 pointers: *const WirePointer,
3380 segment_id: u32,
3381 data_size: BitCount32,
3382 pointer_count: WirePointerCount16,
3383 nesting_limit: i32,
3384}
3385
3386impl<'a> StructReader<'a> {
3387 pub fn new_default<'b>() -> StructReader<'b> {
3388 StructReader {
3389 arena: &NULL_ARENA,
3390 segment_id: 0,
3391 cap_table: Default::default(),
3392 data: ptr::null(),
3393 pointers: ptr::null(),
3394 data_size: 0,
3395 pointer_count: 0,
3396 nesting_limit: 0x7fffffff,
3397 }
3398 }
3399
3400 pub fn imbue(&mut self, cap_table: CapTableReader) {
3401 self.cap_table = cap_table
3402 }
3403
3404 pub fn get_data_section_size(&self) -> BitCount32 {
3405 self.data_size
3406 }
3407
3408 pub fn get_pointer_section_size(&self) -> WirePointerCount16 {
3409 self.pointer_count
3410 }
3411
3412 pub fn get_pointer_section_as_list(&self) -> ListReader<'a> {
3413 ListReader {
3414 arena: self.arena,
3415 segment_id: self.segment_id,
3416 cap_table: self.cap_table,
3417 ptr: self.pointers as *const _,
3418 element_count: u32::from(self.pointer_count),
3419 element_size: ElementSize::Pointer,
3420 step: BITS_PER_WORD as BitCount32,
3421 struct_data_size: 0,
3422 struct_pointer_count: 0,
3423 nesting_limit: self.nesting_limit,
3424 }
3425 }
3426
3427 pub fn get_data_section_as_blob(&self) -> &'a [u8] {
3428 if self.data_size == 0 {
3429 &[]
3432 } else {
3433 unsafe {
3434 ::core::slice::from_raw_parts(self.data, self.data_size as usize / BITS_PER_BYTE)
3435 }
3436 }
3437 }
3438
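    /// Reads the primitive field at `offset`, counted in multiples of the
    /// field's own size. Offsets beyond the end of the data section read as
    /// zero, which (combined with the XOR masks applied by generated code)
    /// yields the field's default value for messages written before the field
    /// existed.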
3439 #[inline]
3440 pub fn get_data_field<T: Primitive + zero::Zero>(&self, offset: ElementCount) -> T {
3441 if (offset + 1) * bits_per_element::<T>() <= self.data_size as usize {
3445 let dwv: *const <T as Primitive>::Raw = self.data as *const _;
3446 unsafe { <T as Primitive>::get(&*dwv.add(offset)) }
3447 } else {
3448 T::zero()
3449 }
3450 }
3451
3452 #[inline]
3453 pub fn get_bool_field(&self, offset: ElementCount) -> bool {
3454 let boffset: BitCount32 = offset as BitCount32;
3455 if boffset < self.data_size {
3456 unsafe {
3457 let b: *const u8 = self.data.add(boffset as usize / BITS_PER_BYTE);
3458 ((*b) & (1u8 << (boffset % BITS_PER_BYTE as u32) as usize)) != 0
3459 }
3460 } else {
3461 false
3462 }
3463 }
3464
3465 #[inline]
3466 pub fn get_data_field_mask<T: Primitive + zero::Zero + Mask>(
3467 &self,
3468 offset: ElementCount,
3469 mask: <T as Mask>::T,
3470 ) -> T {
3471 Mask::mask(self.get_data_field(offset), mask)
3472 }
3473
3474 #[inline]
3475 pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
3476 self.get_bool_field(offset) ^ mask
3477 }
3478
3479 #[inline]
3480 pub fn get_pointer_field(&self, ptr_index: WirePointerCount) -> PointerReader<'a> {
3481 if ptr_index < self.pointer_count as WirePointerCount {
3482 PointerReader {
3483 arena: self.arena,
3484 segment_id: self.segment_id,
3485 cap_table: self.cap_table,
3486 pointer: unsafe { self.pointers.add(ptr_index) },
3487 nesting_limit: self.nesting_limit,
3488 }
3489 } else {
3490 PointerReader::new_default()
3491 }
3492 }
3493
3494 #[inline]
3495 pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
3496 self.get_pointer_field(ptr_index).is_null()
3497 }
3498
3499 pub fn total_size(&self) -> Result<MessageSize> {
3500 let mut result = MessageSize {
3501 word_count: u64::from(wire_helpers::round_bits_up_to_words(u64::from(
3502 self.data_size,
3503 ))) + u64::from(self.pointer_count) * WORDS_PER_POINTER as u64,
3504 cap_count: 0,
3505 };
3506
3507 for i in 0..self.pointer_count as isize {
3508 unsafe {
3509 result += wire_helpers::total_size(
3510 self.arena,
3511 self.segment_id,
3512 self.pointers.offset(i),
3513 self.nesting_limit,
3514 )?;
3515 }
3516 }
3517
3518 Ok(result)
3521 }
3522
3523 fn get_location(&self) -> *const u8 {
3524 self.data
3525 }
3526
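    /// Checks that this struct is laid out canonically at `read_head`, with
    /// pointer targets allocated from `ptr_head` onward. `data_trunc` and
    /// `ptr_trunc` report whether the data and pointer sections are properly
    /// truncated, i.e. whether their final word and final pointer are
    /// non-null.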
3527 pub fn is_canonical(
3528 &self,
3529 read_head: &Cell<*const u8>,
3530 ptr_head: &Cell<*const u8>,
3531 data_trunc: &mut bool,
3532 ptr_trunc: &mut bool,
3533 ) -> Result<bool> {
3534 if self.get_location() != read_head.get() {
3535 return Ok(false);
3536 }
3537
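        // A data section that is not a whole number of words (e.g. a struct
        // lifted out of a bit list) is never canonical.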
3538 if self.get_data_section_size() % BITS_PER_WORD as u32 != 0 {
3539 return Ok(false);
3541 }
3542
3543 let data_size = self.get_data_section_size() / BITS_PER_WORD as u32;
3544
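        // Truncation check: in canonical form the last word of the data
        // section and the last pointer must be non-null; empty sections count
        // as properly truncated.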
3545 if data_size != 0 {
3547 *data_trunc = self.get_data_field::<u64>((data_size - 1) as usize) != 0;
3548 } else {
3549 *data_trunc = true;
3550 }
3551
3552 if self.pointer_count != 0 {
3553 *ptr_trunc = !self
3554 .get_pointer_field(self.pointer_count as usize - 1)
3555 .is_null();
3556 } else {
3557 *ptr_trunc = true;
3558 }
3559
3560 read_head.set(unsafe {
3561 (read_head.get()).offset(
3562 (data_size as isize + self.pointer_count as isize) * (BYTES_PER_WORD as isize),
3563 )
3564 });
3565
3566 for ptr_idx in 0..self.pointer_count {
3567 if !self
3568 .get_pointer_field(ptr_idx as usize)
3569 .is_canonical(ptr_head)?
3570 {
3571 return Ok(false);
3572 }
3573 }
3574
3575 Ok(true)
3576 }
3577}
3578
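/// A mutable view of a struct's data and pointer sections. Unlike
/// `StructReader`, the accessors here perform no bounds checks: the struct was
/// allocated at its declared size, so offsets coming from generated code are
/// trusted.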
3579pub struct StructBuilder<'a> {
3580 arena: &'a mut dyn BuilderArena,
3581 cap_table: CapTableBuilder,
3582 data: *mut u8,
3583 pointers: *mut WirePointer,
3584 segment_id: u32,
3585 data_size: BitCount32,
3586 pointer_count: WirePointerCount16,
3587}
3588
3589impl<'a> StructBuilder<'a> {
3590 #[inline]
3591 pub fn reborrow(&mut self) -> StructBuilder<'_> {
3592 StructBuilder {
3593 arena: self.arena,
3594 ..*self
3595 }
3596 }
3597
3598 pub fn as_reader(&self) -> StructReader<'_> {
3599 StructReader {
3600 arena: self.arena.as_reader(),
3601 cap_table: self.cap_table.into_reader(),
3602 data: self.data,
3603 pointers: self.pointers,
3604 pointer_count: self.pointer_count,
3605 segment_id: self.segment_id,
3606 data_size: self.data_size,
3607 nesting_limit: 0x7fffffff,
3608 }
3609 }
3610
3611 pub fn into_reader(self) -> StructReader<'a> {
3612 StructReader {
3613 arena: self.arena.as_reader(),
3614 cap_table: self.cap_table.into_reader(),
3615 data: self.data,
3616 pointers: self.pointers,
3617 pointer_count: self.pointer_count,
3618 segment_id: self.segment_id,
3619 data_size: self.data_size,
3620 nesting_limit: 0x7fffffff,
3621 }
3622 }
3623
3624 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
3625 self.cap_table = cap_table
3626 }
3627
3628 #[inline]
3629 pub fn set_data_field<T: Primitive>(&self, offset: ElementCount, value: T) {
3630 let ptr: *mut <T as Primitive>::Raw = self.data as *mut _;
3631 unsafe { <T as Primitive>::set(&mut *ptr.add(offset), value) }
3632 }
3633
3634 #[inline]
3635 pub fn set_data_field_mask<T: Primitive + Mask>(
3636 &self,
3637 offset: ElementCount,
3638 value: T,
3639 mask: <T as Mask>::T,
3640 ) {
3641 self.set_data_field(offset, Mask::mask(value, mask));
3642 }
3643
3644 #[inline]
3645 pub fn get_data_field<T: Primitive>(&self, offset: ElementCount) -> T {
3646 let ptr: *const <T as Primitive>::Raw = self.data as *const _;
3647 unsafe { <T as Primitive>::get(&*ptr.add(offset)) }
3648 }
3649
3650 #[inline]
3651 pub fn get_data_field_mask<T: Primitive + Mask>(
3652 &self,
3653 offset: ElementCount,
3654 mask: <T as Mask>::T,
3655 ) -> T {
3656 Mask::mask(self.get_data_field(offset), mask)
3657 }
3658
3659 #[inline]
3660 pub fn set_bool_field(&self, offset: ElementCount, value: bool) {
3661 let boffset: BitCount0 = offset;
3664 let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
3665 let bitnum = boffset % BITS_PER_BYTE;
3666 unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
3667 }
3668
3669 #[inline]
3670 pub fn set_bool_field_mask(&self, offset: ElementCount, value: bool, mask: bool) {
3671 self.set_bool_field(offset, value ^ mask);
3672 }
3673
3674 #[inline]
3675 pub fn get_bool_field(&self, offset: ElementCount) -> bool {
3676 let boffset: BitCount0 = offset;
3677 let b = unsafe { self.data.add(boffset / BITS_PER_BYTE) };
3678 unsafe { ((*b) & (1 << (boffset % BITS_PER_BYTE))) != 0 }
3679 }
3680
3681 #[inline]
3682 pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool {
3683 self.get_bool_field(offset) ^ mask
3684 }
3685
3686 #[inline]
3687 pub fn get_pointer_field(self, ptr_index: WirePointerCount) -> PointerBuilder<'a> {
3688 PointerBuilder {
3689 arena: self.arena,
3690 segment_id: self.segment_id,
3691 cap_table: self.cap_table,
3692 pointer: unsafe { self.pointers.add(ptr_index) },
3693 }
3694 }
3695
3696 #[inline]
3697 pub fn is_pointer_field_null(&self, ptr_index: WirePointerCount) -> bool {
3698 unsafe { (*self.pointers.add(ptr_index)).is_null() }
3699 }
3700
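    /// Overwrites this struct's content with `other`'s: the shared prefix of
    /// each section is copied, and any part of this struct that `other` lacks
    /// is zeroed. Copying a struct onto itself is detected and treated as a
    /// no-op.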
3701 pub fn copy_content_from(&mut self, other: &StructReader) -> Result<()> {
3702 use core::cmp::min;
3703 let shared_data_size = min(self.data_size, other.data_size);
3705 let shared_pointer_count = min(self.pointer_count, other.pointer_count);
3706
3707 if (shared_data_size > 0 && other.data == self.data)
3708 || (shared_pointer_count > 0 && other.pointers == self.pointers)
3709 {
3713            if (shared_data_size == 0 || other.data == self.data)
3714                && (shared_pointer_count == 0 || other.pointers == self.pointers)
3715            {
                    // Both section pointers refer to this same struct (ignoring
                    // empty sections), so `other` is a reader for `self` and the
                    // copy is a no-op.
3716                return Ok(());
3717            }
3718
3719            return Err(Error::from_kind(
3720                ErrorKind::OnlyOneOfTheSectionPointersIsPointingToOurself,
3721            ));
3722        }
3723
3724 unsafe {
3725 if self.data_size > shared_data_size {
3726 if self.data_size == 1 {
3729 self.set_bool_field(0, false);
3730 } else {
3731 let unshared = self
3732 .data
3733 .offset((shared_data_size / BITS_PER_BYTE as u32) as isize);
3734 ptr::write_bytes(
3735 unshared,
3736 0,
3737 ((self.data_size - shared_data_size) / BITS_PER_BYTE as u32) as usize,
3738 );
3739 }
3740 }
3741
3742 if shared_data_size == 1 {
3744 self.set_bool_field(0, other.get_bool_field(0));
3745 } else {
3746 wire_helpers::copy_nonoverlapping_check_zero(
3747 other.data,
3748 self.data,
3749 (shared_data_size / BITS_PER_BYTE as u32) as usize,
3750 );
3751 }
3752
3753 for i in 0..self.pointer_count as isize {
3755 wire_helpers::zero_object(
3756 self.arena,
3757 self.segment_id,
3758 self.pointers.offset(i) as *mut _,
3759 );
3760 }
3761 ptr::write_bytes(self.pointers, 0u8, self.pointer_count as usize);
3762
3763 for i in 0..shared_pointer_count as isize {
3764 wire_helpers::copy_pointer(
3765 self.arena,
3766 self.segment_id,
3767 self.cap_table,
3768 self.pointers.offset(i),
3769 other.arena,
3770 other.segment_id,
3771 other.cap_table,
3772 other.pointers.offset(i),
3773 other.nesting_limit,
3774 false,
3775 )?;
3776 }
3777 }
3778
3779 Ok(())
3780 }
3781}
3782
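/// A read-only view of a list. `step` is the distance in bits between
/// consecutive elements; for inline-composite lists, `struct_data_size` and
/// `struct_pointer_count` describe the layout of each element.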
3783#[derive(Clone, Copy)]
3784pub struct ListReader<'a> {
3785 arena: &'a dyn ReaderArena,
3786 cap_table: CapTableReader,
3787 ptr: *const u8,
3788 segment_id: u32,
3789 element_count: ElementCount32,
3790 step: BitCount32,
3791 struct_data_size: BitCount32,
3792 nesting_limit: i32,
3793 struct_pointer_count: WirePointerCount16,
3794 element_size: ElementSize,
3795}
3796
3797impl<'a> ListReader<'a> {
3798 pub fn new_default<'b>() -> ListReader<'b> {
3799 ListReader {
3800 arena: &NULL_ARENA,
3801 segment_id: 0,
3802 cap_table: Default::default(),
3803 ptr: ptr::null(),
3804 element_count: 0,
3805 element_size: ElementSize::Void,
3806 step: 0,
3807 struct_data_size: 0,
3808 struct_pointer_count: 0,
3809 nesting_limit: 0x7fffffff,
3810 }
3811 }
3812
3813 pub fn imbue(&mut self, cap_table: CapTableReader) {
3814 self.cap_table = cap_table
3815 }
3816
3817 #[inline]
3818 pub fn len(&self) -> ElementCount32 {
3819 self.element_count
3820 }
3821
3822 pub fn is_empty(&self) -> bool {
3823 self.len() == 0
3824 }
3825
3826 pub(crate) fn get_step_size_in_bits(&self) -> u32 {
3827 self.step
3828 }
3829
3830 pub(crate) fn get_element_size(&self) -> ElementSize {
3831 self.element_size
3832 }
3833
3834 pub(crate) fn into_raw_bytes(self) -> &'a [u8] {
3835 if self.element_count == 0 {
3836 &[]
3839 } else {
3840 let num_bytes = wire_helpers::round_bits_up_to_bytes(
3841 u64::from(self.step) * u64::from(self.element_count),
3842 ) as usize;
3843 unsafe { ::core::slice::from_raw_parts(self.ptr, num_bytes) }
3844 }
3845 }
3846
3847 #[inline]
3848 pub fn get_struct_element(&self, index: ElementCount32) -> StructReader<'a> {
3849 let index_byte: ByteCount32 =
3850 ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;
3851
3852 let struct_data: *const u8 = unsafe { self.ptr.offset(index_byte as isize) };
3853
3854 let struct_pointers: *const WirePointer =
3855 unsafe { struct_data.add(self.struct_data_size as usize / BITS_PER_BYTE) as *const _ };
3856
3857 StructReader {
3858 arena: self.arena,
3859 segment_id: self.segment_id,
3860 cap_table: self.cap_table,
3861 data: struct_data,
3862 pointers: struct_pointers,
3863 data_size: self.struct_data_size,
3864 pointer_count: self.struct_pointer_count,
3865 nesting_limit: self.nesting_limit - 1,
3866 }
3867 }
3868
3869 #[inline]
3870 pub fn get_pointer_element(self, index: ElementCount32) -> PointerReader<'a> {
3871 let offset = (self.struct_data_size as u64 / BITS_PER_BYTE as u64
3872 + u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64)
3873 as isize;
3874 PointerReader {
3875 arena: self.arena,
3876 segment_id: self.segment_id,
3877 cap_table: self.cap_table,
3878 pointer: unsafe { self.ptr.offset(offset) } as *const _,
3879 nesting_limit: self.nesting_limit,
3880 }
3881 }
3882
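    /// Checks that this list is laid out canonically at `read_head`. An
    /// inline-composite list's one-word tag must come first, followed by its
    /// elements in preorder; a primitive list's padding bits in the final word
    /// must all be zero.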
3883 pub unsafe fn is_canonical(
3884 &self,
3885 read_head: &Cell<*const u8>,
3886 reff: *const WirePointer,
3887 ) -> Result<bool> {
3888 match self.element_size {
3889 ElementSize::InlineComposite => {
3890                read_head.set(unsafe { read_head.get().add(BYTES_PER_WORD) });
3891                if self.ptr as *const _ != read_head.get() {
3892 return Ok(false);
3893 }
3894 if self.struct_data_size % BITS_PER_WORD as u32 != 0 {
3895 return Ok(false);
3896 }
3897 let struct_size = (self.struct_data_size / BITS_PER_WORD as u32)
3898 + u32::from(self.struct_pointer_count);
3899 let word_count = unsafe { (*reff).list_inline_composite_word_count() };
3900 if struct_size * self.element_count != word_count {
3901 return Ok(false);
3902 }
3903 if struct_size == 0 {
3904 return Ok(true);
3905 }
3906 let list_end = unsafe {
3907 read_head
3908 .get()
3909 .add((self.element_count * struct_size) as usize * BYTES_PER_WORD)
3910 };
3911 let pointer_head = Cell::new(list_end);
3912 let mut list_data_trunc = false;
3913 let mut list_ptr_trunc = false;
3914 for idx in 0..self.element_count {
3915 let mut data_trunc = false;
3916 let mut ptr_trunc = false;
3917 if !self.get_struct_element(idx).is_canonical(
3918 read_head,
3919 &pointer_head,
3920 &mut data_trunc,
3921 &mut ptr_trunc,
3922 )? {
3923 return Ok(false);
3924 }
3925 list_data_trunc |= data_trunc;
3926 list_ptr_trunc |= ptr_trunc;
3927 }
3928 assert_eq!(read_head.get(), list_end);
3929 read_head.set(pointer_head.get());
3930 Ok(list_data_trunc && list_ptr_trunc)
3931 }
3932 ElementSize::Pointer => {
3933 if self.ptr as *const _ != read_head.get() {
3934 return Ok(false);
3935 }
3936 read_head.set(unsafe {
3937 read_head
3938 .get()
3939 .offset(self.element_count as isize * BYTES_PER_WORD as isize)
3940 });
3941 for idx in 0..self.element_count {
3942 if !self.get_pointer_element(idx).is_canonical(read_head)? {
3943 return Ok(false);
3944 }
3945 }
3946 Ok(true)
3947 }
3948 element_size => {
3949 if self.ptr != read_head.get() as *const _ {
3950 return Ok(false);
3951 }
3952 let bit_size =
3953 u64::from(self.element_count) * u64::from(data_bits_per_element(element_size));
3954 let mut word_size = bit_size / BITS_PER_WORD as u64;
3955 if bit_size % BITS_PER_WORD as u64 != 0 {
3956 word_size += 1
3957 }
3958
3959 let byte_size = bit_size / BITS_PER_BYTE as u64;
3960 let mut byte_read_head: *const u8 = read_head.get();
3961 byte_read_head = unsafe { byte_read_head.offset(byte_size as isize) };
3962 let read_head_end = unsafe {
3963 read_head
3964 .get()
3965 .offset(word_size as isize * BYTES_PER_WORD as isize)
3966 };
3967
3968 let leftover_bits = bit_size % BITS_PER_BYTE as u64;
3969 if leftover_bits > 0 {
3970 let mask: u8 = !((1 << leftover_bits as u8) - 1);
3971 let partial_byte = unsafe { *byte_read_head };
3972
3973 if partial_byte & mask != 0 {
3974 return Ok(false);
3975 }
3976 byte_read_head = unsafe { byte_read_head.offset(1_isize) };
3977 }
3978
3979 while byte_read_head != read_head_end {
3980 if unsafe { *byte_read_head } != 0 {
3981 return Ok(false);
3982 }
3983 byte_read_head = unsafe { byte_read_head.offset(1_isize) };
3984 }
3985
3986 read_head.set(read_head_end);
3987 Ok(true)
3988 }
3989 }
3990 }
3991}
3992
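/// The mutable counterpart of `ListReader`, pointing into a builder arena.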
3993pub struct ListBuilder<'a> {
3994 arena: &'a mut dyn BuilderArena,
3995 cap_table: CapTableBuilder,
3996 ptr: *mut u8,
3997 segment_id: u32,
3998 element_count: ElementCount32,
3999 step: BitCount32,
4000 struct_data_size: BitCount32,
4001 struct_pointer_count: WirePointerCount16,
4002 element_size: ElementSize,
4003}
4004
4005impl<'a> ListBuilder<'a> {
4006 #[inline]
4007 pub fn new_default(arena: &mut dyn BuilderArena) -> ListBuilder<'_> {
4008 ListBuilder {
4009 arena,
4010 segment_id: 0,
4011 cap_table: Default::default(),
4012 ptr: ptr::null_mut(),
4013 element_count: 0,
4014 element_size: ElementSize::Void,
4015 step: 0,
4016 struct_data_size: 0,
4017 struct_pointer_count: 0,
4018 }
4019 }
4020
4021 pub fn into_reader(self) -> ListReader<'a> {
4022 ListReader {
4023 arena: self.arena.as_reader(),
4024 segment_id: self.segment_id,
4025 cap_table: self.cap_table.into_reader(),
4026 ptr: self.ptr as *const _,
4027 element_count: self.element_count,
4028 element_size: self.element_size,
4029 step: self.step,
4030 struct_data_size: self.struct_data_size,
4031 struct_pointer_count: self.struct_pointer_count,
4032 nesting_limit: 0x7fffffff,
4033 }
4034 }
4035
4036 #[inline]
4037 pub fn reborrow(&mut self) -> ListBuilder<'_> {
4038 ListBuilder {
4039 arena: self.arena,
4040 ..*self
4041 }
4042 }
4043
4044 pub fn imbue(&mut self, cap_table: CapTableBuilder) {
4045 self.cap_table = cap_table
4046 }
4047
4048 #[inline]
4049 pub fn len(&self) -> ElementCount32 {
4050 self.element_count
4051 }
4052
4053 pub fn is_empty(&self) -> bool {
4054 self.len() == 0
4055 }
4056
4057 #[inline]
4058 pub fn get_struct_element(self, index: ElementCount32) -> StructBuilder<'a> {
4059 let index_byte = ((u64::from(index) * u64::from(self.step)) / BITS_PER_BYTE as u64) as u32;
4060 let struct_data = unsafe { self.ptr.offset(index_byte as isize) };
4061 let struct_pointers =
4062 unsafe { struct_data.add((self.struct_data_size as usize) / BITS_PER_BYTE) as *mut _ };
4063 StructBuilder {
4064 arena: self.arena,
4065 segment_id: self.segment_id,
4066 cap_table: self.cap_table,
4067 data: struct_data,
4068 pointers: struct_pointers,
4069 data_size: self.struct_data_size,
4070 pointer_count: self.struct_pointer_count,
4071 }
4072 }
4073
4074 pub(crate) fn get_element_size(&self) -> ElementSize {
4075 self.element_size
4076 }
4077
4078 #[inline]
4079 pub fn get_pointer_element(self, index: ElementCount32) -> PointerBuilder<'a> {
4080 let offset = (u64::from(index) * u64::from(self.step) / BITS_PER_BYTE as u64) as u32;
4081 PointerBuilder {
4082 arena: self.arena,
4083 segment_id: self.segment_id,
4084 cap_table: self.cap_table,
4085 pointer: unsafe { self.ptr.offset(offset as isize) } as *mut _,
4086 }
4087 }
4088
4089 pub(crate) fn as_raw_bytes(&self) -> &'a mut [u8] {
4090 if self.element_count == 0 {
4091 &mut []
4094 } else {
4095 let num_bytes = wire_helpers::round_bits_up_to_bytes(
4096 u64::from(self.step) * u64::from(self.element_count),
4097 ) as usize;
4098 unsafe { ::core::slice::from_raw_parts_mut(self.ptr, num_bytes) }
4099 }
4100 }
4101}
4102
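/// A value that can be stored directly in the data section of a list: `()`,
/// `bool`, or a fixed-width numeric type implementing `Primitive`.
///
/// A sketch of how the accessors pair up (illustrative only; this module is
/// crate-private, so it is not a runnable doctest):
///
/// ```ignore
/// PrimitiveElement::set(&list_builder, 0, 42u32);
/// let x: u32 = PrimitiveElement::get_from_builder(&list_builder, 0);
/// assert_eq!(x, 42);
/// ```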
4103pub trait PrimitiveElement {
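    /// Reads the element at `index` from a `ListReader`. No bounds check is
    /// performed.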
4107 fn get(list_reader: &ListReader, index: ElementCount32) -> Self;
4109
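    /// Reads the element at `index` back out of a partially built list.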
4110 fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self;
4112
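    /// Writes `value` at `index`. No bounds check is performed.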
4113 fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self);
4115
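    /// The wire encoding used for a list of this element type.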
4116 fn element_size() -> ElementSize;
4118}
4119
4120impl<T: Primitive> PrimitiveElement for T {
4121 #[inline]
4122 fn get(list_reader: &ListReader, index: ElementCount32) -> Self {
4123 let offset = (u64::from(index) * u64::from(list_reader.step) / BITS_PER_BYTE as u64) as u32;
4124 unsafe {
4125 let ptr: *const u8 = list_reader.ptr.offset(offset as isize);
4126 <Self as Primitive>::get(&*(ptr as *const <Self as Primitive>::Raw))
4127 }
4128 }
4129
4130 #[inline]
4131 fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self {
4132 let offset =
4133 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4134 unsafe {
4135 let ptr: *mut <Self as Primitive>::Raw =
4136 list_builder.ptr.offset(offset as isize) as *mut _;
4137 <Self as Primitive>::get(&*ptr)
4138 }
4139 }
4140
4141 #[inline]
4142 fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self) {
4143 let offset =
4144 (u64::from(index) * u64::from(list_builder.step) / BITS_PER_BYTE as u64) as u32;
4145 unsafe {
4146 let ptr: *mut <Self as Primitive>::Raw =
4147 list_builder.ptr.offset(offset as isize) as *mut _;
4148 <Self as Primitive>::set(&mut *ptr, value);
4149 }
4150 }
4151
4152 fn element_size() -> ElementSize {
4153 match mem::size_of::<Self>() {
4154 0 => Void,
4155 1 => Byte,
4156 2 => TwoBytes,
4157 4 => FourBytes,
4158 8 => EightBytes,
4159 _ => unreachable!(),
4160 }
4161 }
4162}
4163
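// Booleans are packed one per bit. With `bindex = index * step` (in bits),
// element `index` lives at byte `bindex / 8`, bit `bindex % 8`; e.g. in a
// plain bit list (`step` = 1) index 10 lands in byte 1, bit 2.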
4164impl PrimitiveElement for bool {
4165 #[inline]
4166 fn get(list: &ListReader, index: ElementCount32) -> Self {
4167 let bindex = u64::from(index) * u64::from(list.step);
4168 unsafe {
4169 let b: *const u8 = list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize);
4170 ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0
4171 }
4172 }
4173 #[inline]
4174 fn get_from_builder(list: &ListBuilder, index: ElementCount32) -> Self {
4175 let bindex = u64::from(index) * u64::from(list.step);
4176 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4177 unsafe { ((*b) & (1 << (bindex % BITS_PER_BYTE as u64))) != 0 }
4178 }
4179 #[inline]
4180 fn set(list: &ListBuilder, index: ElementCount32, value: Self) {
4181 let bindex = u64::from(index) * u64::from(list.step);
4182 let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE as u64) as isize) };
4183
4184 let bitnum = bindex % BITS_PER_BYTE as u64;
4185 unsafe { (*b) = ((*b) & !(1 << bitnum)) | (u8::from(value) << bitnum) }
4186 }
4187 fn element_size() -> ElementSize {
4188 Bit
4189 }
4190}
4191
4192impl PrimitiveElement for () {
4193 #[inline]
4194 fn get(_list: &ListReader, _index: ElementCount32) {}
4195
4196 #[inline]
4197 fn get_from_builder(_list: &ListBuilder, _index: ElementCount32) {}
4198
4199 #[inline]
4200 fn set(_list: &ListBuilder, _index: ElementCount32, _value: ()) {}
4201
4202 fn element_size() -> ElementSize {
4203 Void
4204 }
4205}