1use crate::{
37    DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex, FuncIndex,
38    FuncRefIndex, GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex, TagIndex,
39};
40use cranelift_entity::packed_option::ReservedValue;
41
/// Convert a `usize` byte count into a `u32`, panicking on overflow.
///
/// On 32-bit targets `usize` always fits in `u32`, so this conversion can
/// never fail; the `expect` is kept so both cfg variants report the same
/// diagnostic (previously the 32-bit variant used a message-less `unwrap`).
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
/// Convert a `usize` byte count into a `u32`, panicking with a descriptive
/// message on overflow (possible on 64-bit targets).
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
50
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero (callers pass pointer sizes and small
/// power-of-two alignments such as 16). The intermediate
/// `offset + (width - 1)` can only overflow `u32` for offsets near
/// `u32::MAX`, far beyond any realistic vmctx size.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    // Guard the division below with a clearer failure than a raw
    // divide-by-zero panic.
    debug_assert!(width > 0, "alignment width must be non-zero");
    (offset + (width - 1)) / width * width
}
56
/// Precomputed offsets of the regions and fields of a module's `VMContext`,
/// parameterized over a pointer-size representation `P` (see [`PtrSize`]).
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The size in bytes of a pointer on the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by the module instance
    /// (defined and not shared — see `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module, i.e. functions that
    /// need a `VMFuncRef` slot in the vmctx.
    pub num_escaped_funcs: u32,

    // Precalculated byte offsets of each dynamically-sized region within
    // the vmctx, filled in by `From<VMOffsetsFields<P>>`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    imported_tags: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_tags: u32,
    defined_func_refs: u32,
    // Total size in bytes of the vmctx allocation.
    size: u32,
}
101
/// Trait abstracting over the target's pointer size, used as the `ptr`
/// field of [`VMOffsets`]. All offsets below are derived purely from
/// `self.size()` plus fixed-width (4- or 8-byte) fields, so they can be
/// computed for a foreign target as well as the host.
pub trait PtrSize {
    /// Returns the pointer size, in bytes, for the target.
    fn size(&self) -> u8;

    /// Offset of the store-context pointer within `VMContext`: the size of
    /// the leading `u32` magic, rounded up to pointer alignment.
    fn vmcontext_store_context(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions pointer: one pointer past the
    /// store-context pointer.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_store_context() + self.size()
    }

    // `VMFuncRef` is four consecutive pointer-sized fields; the accessors
    // below return `field_index * pointer_size`.

    /// Offset of the `array_call` field in a `VMFuncRef` (field 0).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` field in a `VMFuncRef` (field 1).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` field in a `VMFuncRef` (field 2).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` field in a `VMFuncRef` (field 3).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Size of a global definition: a fixed 16 bytes regardless of the
    /// target pointer size.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    /// Size of a tag definition: a fixed 4 bytes.
    #[inline]
    fn size_of_vmtag_definition(&self) -> u8 {
        4
    }

    /// The maximum size, in bytes, of an inline-stored value; defined as
    /// the size of a global definition.
    #[inline]
    fn maximum_value_size(&self) -> u8 {
        self.size_of_vmglobal_definition()
    }

    // Offsets within the store context. Fields are laid out sequentially:
    // two 8-byte fields, then a run of pointer-sized fields, with an
    // inline memory-definition for the GC heap in the middle.

    /// Offset of the fuel-consumed counter (first field, 8 bytes).
    #[inline]
    fn vmstore_context_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the epoch deadline (8 bytes, after fuel).
    #[inline]
    fn vmstore_context_epoch_deadline(&self) -> u8 {
        self.vmstore_context_fuel_consumed() + 8
    }

    /// Offset of the stack limit (pointer-sized, after the epoch deadline).
    #[inline]
    fn vmstore_context_stack_limit(&self) -> u8 {
        self.vmstore_context_epoch_deadline() + 8
    }

    /// Offset of the inline GC-heap memory definition.
    #[inline]
    fn vmstore_context_gc_heap(&self) -> u8 {
        self.vmstore_context_stack_limit() + self.size()
    }

    /// Offset of the GC heap's base pointer: the `base` field of the
    /// inline memory definition at `vmstore_context_gc_heap`.
    fn vmstore_context_gc_heap_base(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_base();
        // Must still lie inside the GC-heap region, i.e. before the field
        // that follows it.
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// Offset of the GC heap's current length: the `current_length` field
    /// of the inline memory definition at `vmstore_context_gc_heap`.
    fn vmstore_context_gc_heap_current_length(&self) -> u8 {
        let offset = self.vmstore_context_gc_heap() + self.vmmemory_definition_current_length();
        debug_assert!(offset < self.vmstore_context_last_wasm_exit_trampoline_fp());
        offset
    }

    /// Offset of the last-wasm-exit trampoline frame pointer, which
    /// immediately follows the inline GC-heap memory definition.
    fn vmstore_context_last_wasm_exit_trampoline_fp(&self) -> u8 {
        self.vmstore_context_gc_heap() + self.size_of_vmmemory_definition()
    }

    /// Offset of the last-wasm-exit program counter.
    fn vmstore_context_last_wasm_exit_pc(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_trampoline_fp() + self.size()
    }

    /// Offset of the last-wasm-entry stack pointer.
    fn vmstore_context_last_wasm_entry_sp(&self) -> u8 {
        self.vmstore_context_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the last-wasm-entry frame pointer.
    fn vmstore_context_last_wasm_entry_fp(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_sp() + self.size()
    }

    /// Offset of the last-wasm-entry trap handler.
    fn vmstore_context_last_wasm_entry_trap_handler(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_fp() + self.size()
    }

    /// Offset of the stack-chain field, last in this sequence.
    fn vmstore_context_stack_chain(&self) -> u8 {
        self.vmstore_context_last_wasm_entry_trap_handler() + self.size()
    }

    // A memory definition is two consecutive pointer-sized fields:
    // `base` then `current_length`.

    /// Offset of the `base` field of a memory definition.
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field of a memory definition.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total size of a memory definition: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a memory definition.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the `func_ref` field in an array-call host function
    /// context: the leading `u32` magic, rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Size of a stack-chain value: two pointer-sized fields.
    fn size_of_vmstack_chain(&self) -> u8 {
        2 * self.size()
    }

    // Stack limits: two consecutive pointer-sized fields.

    /// Offset of the `stack_limit` field (first field).
    fn vmstack_limits_stack_limit(&self) -> u8 {
        0
    }

    /// Offset of the `last_wasm_entry_fp` field (one pointer in).
    fn vmstack_limits_last_wasm_entry_fp(&self) -> u8 {
        self.size()
    }

    // Host array layout: a 4-byte length, a 4-byte capacity, then a
    // pointer-sized data pointer.

    /// Offset of the `length` field (4 bytes, at the start).
    fn vmhostarray_length(&self) -> u8 {
        0
    }

    /// Offset of the `capacity` field (4 bytes, after `length`).
    fn vmhostarray_capacity(&self) -> u8 {
        4
    }

    /// Offset of the `data` pointer (after length + capacity).
    fn vmhostarray_data(&self) -> u8 {
        8
    }

    /// Total size of a host array: 8 bytes of length/capacity plus one
    /// pointer.
    fn size_of_vmhostarray(&self) -> u8 {
        8 + self.size()
    }

    // Common stack information layout.

    /// Offset of the stack limits (first field; two pointers — see the
    /// `vmstack_limits_*` accessors).
    fn vmcommon_stack_information_limits(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `state` field, after the two-pointer limits.
    fn vmcommon_stack_information_state(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `handlers` host array: 4 bytes past `state`, rounded
    /// up to pointer alignment.
    fn vmcommon_stack_information_handlers(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_state() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the first-switch-handler index, after the `handlers`
    /// array.
    fn vmcommon_stack_information_first_switch_handler_index(&self) -> u8 {
        self.vmcommon_stack_information_handlers() + self.size_of_vmhostarray()
    }

    /// Total size of the common stack information: the 4-byte
    /// first-switch-handler index rounded up to pointer alignment.
    fn size_of_vmcommon_stack_information(&self) -> u8 {
        u8::try_from(align(
            self.vmcommon_stack_information_first_switch_handler_index() as u32 + 4,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Continuation object layout: a pointer-sized contref followed by a
    // revision counter.

    /// Offset of the `contref` field (first field).
    fn vmcontobj_contref(&self) -> u8 {
        0
    }

    /// Offset of the `revision` field (one pointer in).
    fn vmcontobj_revision(&self) -> u8 {
        self.size()
    }

    /// Total size of a continuation object: revision offset plus a
    /// `usize`-sized field, rounded up to pointer alignment.
    fn size_of_vmcontobj(&self) -> u8 {
        u8::try_from(align(
            u32::from(self.vmcontobj_revision())
                + u32::try_from(core::mem::size_of::<usize>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Continuation reference layout.

    /// Offset of the common stack information (first field).
    fn vmcontref_common_stack_information(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the parent chain: the end of the common stack
    /// information, rounded up to pointer alignment.
    fn vmcontref_parent_chain(&self) -> u8 {
        u8::try_from(align(
            (self.vmcontref_common_stack_information() + self.size_of_vmcommon_stack_information())
                as u32,
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the last-ancestor field, after the two-pointer parent
    /// chain.
    fn vmcontref_last_ancestor(&self) -> u8 {
        self.vmcontref_parent_chain() + 2 * self.size()
    }

    /// Offset of the revision counter.
    fn vmcontref_revision(&self) -> u8 {
        self.vmcontref_last_ancestor() + self.size()
    }

    /// Offset of the stack field.
    fn vmcontref_stack(&self) -> u8 {
        self.vmcontref_revision() + self.size()
    }

    /// Offset of the `args` host array, after the three-pointer stack
    /// field.
    fn vmcontref_args(&self) -> u8 {
        self.vmcontref_stack() + 3 * self.size()
    }

    /// Offset of the `values` host array, after `args`.
    fn vmcontref_values(&self) -> u8 {
        self.vmcontref_args() + self.size_of_vmhostarray()
    }

    // Static prefix of `VMContext`: the magic followed by six pointer-sized
    // slots, each one pointer past the previous.

    /// Offset of the `magic` value: always the first field.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the store-context pointer in the vmctx.
    #[inline]
    fn vmctx_store_context(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer in the vmctx.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_store_context() + self.size()
    }

    /// Offset of the epoch pointer in the vmctx.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the GC-heap data pointer in the vmctx.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the type-ids array pointer in the vmctx.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset where the dynamically-sized per-module data begins; this is
    /// the base used by `From<VMOffsetsFields<P>>` when laying out regions.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
467
/// Marker type whose [`PtrSize`] impl reports the native host pointer size
/// (`core::mem::size_of::<usize>()`).
#[derive(Clone, Copy)]
pub struct HostPtr;
471
impl PtrSize for HostPtr {
    #[inline]
    fn size(&self) -> u8 {
        // `size_of::<usize>()` is at most 8 on supported targets, so this
        // `as` cast cannot truncate.
        core::mem::size_of::<usize>() as u8
    }
}
478
// A plain `u8` can be used as an explicit, target-independent pointer size
// (e.g. computing offsets for a foreign target).
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
485
/// Inputs for constructing a [`VMOffsets`]: the public counts only, without
/// the derived offsets (those are computed by `From<VMOffsetsFields<P>>`).
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The size in bytes of a pointer on the target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of imported tags in the module.
    pub num_imported_tags: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by the module instance.
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of defined tags in the module.
    pub num_defined_tags: u32,
    /// The number of escaped functions in the module.
    pub num_escaped_funcs: u32,
}
515
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` instance for the given pointer size and
    /// module, deriving all region counts from the module.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are the module's defined, non-shared memories:
        // skip the imports, then drop any memory marked `shared`.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_imported_tags: cast_to_u32(module.num_imported_tags),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_defined_tags: cast_to_u32(module.tags.len() - module.num_imported_tags),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the size in bytes of a pointer on the target.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size-in-bytes)` pairs, one per
    /// vmctx region, suitable for human-readable diagnostics.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        // The macro destructures `self` exhaustively — adding a field to
        // `VMOffsets` without updating this list is a compile error — and
        // then converts each region's *start offset* into a *size* by
        // subtracting it from the start of the region that follows it.
        // Consequently the invocation below lists regions from the END of
        // the vmctx backwards.
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_imported_tags: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_defined_tags: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Walk from the total size down through each region start.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever precedes the first listed region is the static
                // vmctx prefix, which is never empty.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_tags: "defined tags",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_tags: "imported tags",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
613
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Compute the layout of every dynamically-sized vmctx region from the
    /// counts in `fields`, assigning each region a start offset and
    /// recording the total size.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all region offsets zeroed; they are filled in by the
        // `fields!` invocation below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_imported_tags: fields.num_imported_tags,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_defined_tags: fields.num_defined_tags,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            imported_tags: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_tags: 0,
            defined_func_refs: 0,
            size: 0,
        };

        // Checked add/mul: a pathologically large module panics instead of
        // silently wrapping the vmctx size.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Regions begin right after the static vmctx prefix.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = expr` records the current offset as `field`'s
        // start and advances by `expr` bytes; `align(n)` rounds the
        // running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        // Layout order of the dynamic regions. Note the `align(16)` before
        // defined globals: global definitions are 16 bytes and kept
        // 16-byte aligned.
        fields! {
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(imported_tags)
                = cmul(ret.num_imported_tags, ret.size_of_vmtag_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_tags)
                = cmul(ret.num_defined_tags, ret.ptr.size_of_vmtag_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        ret.size = next_field_offset;

        return ret;
    }
}
705
706impl<P: PtrSize> VMOffsets<P> {
707    #[inline]
709    pub fn vmfunction_import_wasm_call(&self) -> u8 {
710        0 * self.pointer_size()
711    }
712
713    #[inline]
715    pub fn vmfunction_import_array_call(&self) -> u8 {
716        1 * self.pointer_size()
717    }
718
719    #[inline]
721    pub fn vmfunction_import_vmctx(&self) -> u8 {
722        2 * self.pointer_size()
723    }
724
725    #[inline]
727    pub fn size_of_vmfunction_import(&self) -> u8 {
728        3 * self.pointer_size()
729    }
730}
731
732impl<P: PtrSize> VMOffsets<P> {
734    pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
736        1 * self.pointer_size()
737    }
738}
739
740impl<P: PtrSize> VMOffsets<P> {
742    #[inline]
744    pub fn vmtable_import_from(&self) -> u8 {
745        0 * self.pointer_size()
746    }
747
748    #[inline]
750    pub fn vmtable_import_vmctx(&self) -> u8 {
751        1 * self.pointer_size()
752    }
753
754    #[inline]
756    pub fn vmtable_import_index(&self) -> u8 {
757        2 * self.pointer_size()
758    }
759
760    #[inline]
762    pub fn size_of_vmtable_import(&self) -> u8 {
763        3 * self.pointer_size()
764    }
765}
766
767impl<P: PtrSize> VMOffsets<P> {
769    #[inline]
771    pub fn vmtable_definition_base(&self) -> u8 {
772        0 * self.pointer_size()
773    }
774
775    pub fn vmtable_definition_current_elements(&self) -> u8 {
777        1 * self.pointer_size()
778    }
779
780    #[inline]
782    pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
783        self.pointer_size()
784    }
785
786    #[inline]
788    pub fn size_of_vmtable_definition(&self) -> u8 {
789        2 * self.pointer_size()
790    }
791}
792
793impl<P: PtrSize> VMOffsets<P> {
795    #[inline]
797    pub fn vmmemory_import_from(&self) -> u8 {
798        0 * self.pointer_size()
799    }
800
801    #[inline]
803    pub fn vmmemory_import_vmctx(&self) -> u8 {
804        1 * self.pointer_size()
805    }
806
807    #[inline]
809    pub fn vmmemory_import_index(&self) -> u8 {
810        2 * self.pointer_size()
811    }
812
813    #[inline]
815    pub fn size_of_vmmemory_import(&self) -> u8 {
816        3 * self.pointer_size()
817    }
818}
819
820impl<P: PtrSize> VMOffsets<P> {
822    #[inline]
824    pub fn vmglobal_import_from(&self) -> u8 {
825        0 * self.pointer_size()
826    }
827
828    #[inline]
830    pub fn size_of_vmglobal_import(&self) -> u8 {
831        2 * self.pointer_size() + 8
833    }
834}
835
impl<P: PtrSize> VMOffsets<P> {
    /// Return the size of a shared type index: a fixed 4 bytes (32-bit
    /// index), independent of pointer size.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
844
845impl<P: PtrSize> VMOffsets<P> {
847    #[inline]
849    pub fn vmtag_import_from(&self) -> u8 {
850        0 * self.pointer_size()
851    }
852
853    #[inline]
855    pub fn vmtag_import_vmctx(&self) -> u8 {
856        1 * self.pointer_size()
857    }
858
859    #[inline]
861    pub fn vmtag_import_index(&self) -> u8 {
862        2 * self.pointer_size()
863    }
864
865    #[inline]
867    pub fn size_of_vmtag_import(&self) -> u8 {
868        3 * self.pointer_size()
869    }
870}
871
/// Accessors for the start offset of each dynamic vmctx region, plus
/// per-element offsets within those regions. Every indexed accessor
/// asserts that the index is in range for its region.
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the imported-functions region.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset of the imported-tables region.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset of the imported-memories region.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset of the imported-globals region.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset of the imported-tags region.
    #[inline]
    pub fn vmctx_imported_tags_begin(&self) -> u32 {
        self.imported_tags
    }

    /// The offset of the defined-tables region.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset of the defined-memories region (pointers to memory
    /// definitions).
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset of the owned-memories region (inline memory
    /// definitions).
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset of the defined-globals region.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset of the defined-tags region.
    #[inline]
    pub fn vmctx_tags_begin(&self) -> u32 {
        self.defined_tags
    }

    /// The offset of the func-refs region for escaped functions.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// Return the total size of the vmctx allocation, in bytes.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    // Indexed accessors: region start + index * per-element size.

    /// Return the offset of the function-import record at `index`.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// Return the offset of the table-import record at `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the offset of the memory-import record at `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// Return the offset of the global-import record at `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// Return the offset of the tag-import record at `index`.
    #[inline]
    pub fn vmctx_vmtag_import(&self, index: TagIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tags);
        self.vmctx_imported_tags_begin() + index.as_u32() * u32::from(self.size_of_vmtag_import())
    }

    /// Return the offset of the table definition at `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// Return the offset of the memory-definition pointer at `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// Return the offset of the owned memory definition at `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// Return the offset of the global definition at `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// Return the offset of the tag definition at `index`.
    #[inline]
    pub fn vmctx_vmtag_definition(&self, index: DefinedTagIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tags);
        self.vmctx_tags_begin() + index.as_u32() * u32::from(self.ptr.size_of_vmtag_definition())
    }

    /// Return the offset of the func-ref slot at `index`. The index must
    /// not be the reserved (invalid) sentinel value.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    // Field-level convenience accessors: record offset + field offset.

    /// Return the offset of the `wasm_call` field of function import
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// Return the offset of the `array_call` field of function import
    /// `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// Return the offset of the `vmctx` field of function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// Return the offset of the `from` field of table import `index`.
    #[inline]
    pub fn vmctx_vmtable_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// Return the offset of the `base` field of table definition `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// Return the offset of the `current_elements` field of table
    /// definition `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// Return the offset of the `from` field of memory import `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// Return the offset of the `base` field of owned memory definition
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// Return the offset of the `current_length` field of owned memory
    /// definition `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// Return the offset of the `from` field of global import `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }

    /// Return the offset of the `from` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_from(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_from())
    }

    /// Return the offset of the `vmctx` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_vmctx(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_vmctx())
    }

    /// Return the offset of the `index` field of tag import `index`.
    #[inline]
    pub fn vmctx_vmtag_import_index(&self, index: TagIndex) -> u32 {
        self.vmctx_vmtag_import(index) + u32::from(self.vmtag_import_index())
    }
}
1112
/// Offsets within a GC object header.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the `kind` field: always the first field.
    #[inline]
    pub fn vm_gc_header_kind(&self) -> u32 {
        0
    }

    /// Offset of the reserved bits; they share the same offset (and
    /// therefore the same 32-bit word) as `kind`.
    #[inline]
    pub fn vm_gc_header_reserved_bits(&self) -> u32 {
        self.vm_gc_header_kind()
    }

    /// Offset of the `ty` field: 4 bytes past `kind`.
    #[inline]
    pub fn vm_gc_header_ty(&self) -> u32 {
        self.vm_gc_header_kind() + 4
    }
}
1134
/// Offsets within a deferred-reference-counting (DRC) GC header.
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the reference count. The constant 8 skips the common GC
    /// header (`kind` at 0, `ty` at 4 — see the `vm_gc_header_*` accessors
    /// above) and must stay in sync with it.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }

    /// Offset of the next-over-approximated-stack-root field: 8 bytes past
    /// the reference count.
    #[inline]
    pub fn vm_drc_header_next_over_approximated_stack_root(&self) -> u32 {
        self.vm_drc_header_ref_count() + 8
    }
}
1151
/// Magic value stored at the start of every core Wasm `VMContext`: the
/// little-endian bytes of `"core"`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value for array-call host function contexts: the little-endian
/// bytes of `"ACHF"`, distinguishing them from core `VMContext`s.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
1162
1163#[cfg(test)]
1164mod tests {
1165    use crate::vmoffsets::align;
1166
1167    #[test]
1168    fn alignment() {
1169        fn is_aligned(x: u32) -> bool {
1170            x % 16 == 0
1171        }
1172        assert!(is_aligned(align(0, 16)));
1173        assert!(is_aligned(align(32, 16)));
1174        assert!(is_aligned(align(33, 16)));
1175        assert!(is_aligned(align(31, 16)));
1176    }
1177}