mod allocator;
mod r#ref;

pub use allocator::InstanceAllocator;
pub use r#ref::{InstanceRef, WeakOrStrongInstanceRef};

use crate::func_data_registry::VMFuncRef;
use crate::global::Global;
use crate::imports::Imports;
use crate::memory::{Memory, MemoryError};
use crate::sig_registry::VMSharedSignatureIndex;
use crate::table::{Table, TableElement};
use crate::trap::traphandlers::get_trap_handler;
use crate::trap::{catch_traps, Trap, TrapCode};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody,
    VMFunctionEnvironment, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
    VMLocalFunction, VMMemoryDefinition, VMMemoryImport, VMTableDefinition, VMTableImport,
};
use crate::{unc_vm_call_trampoline, Artifact, VMOffsets, VMTrampoline};
use crate::{VMExtern, VMFunction, VMGlobal};
use memoffset::offset_of;
use more_asserts::assert_lt;
use std::any::Any;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::ffi;
use std::fmt;
use std::mem;
use std::ptr::{self, NonNull};
use std::slice;
use std::sync::Arc;
use unc_vm_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
use unc_vm_types::{
    DataIndex, DataInitializer, ElemIndex, ExportIndex, FastGasCounter, FunctionIndex, GlobalIndex,
    GlobalInit, InstanceConfig, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex,
    OwnedTableInitializer, Pages, TableIndex,
};

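/// Signature of the deferred initializer for an imported function
/// environment. `initialize_host_envs` calls it with the `env` pointer and a
/// pointer to the instance; returning `Err` aborts initialization.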
pub type ImportInitializerFuncPtr<ResultErr = *mut ffi::c_void> =
    fn(*mut ffi::c_void, *const ffi::c_void) -> Result<(), ResultErr>;

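/// A WebAssembly instance: the module's runtime state (memories, tables,
/// globals, funcrefs, host state) plus the `VMContext` used by generated
/// code. `#[repr(C)]` keeps `vmctx` as the last field, since the context
/// data described by `VMOffsets` is laid out directly after the struct and
/// reached via `vmctx_plus_offset`.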
#[repr(C)]
pub struct Instance {
    pub(crate) artifact: Arc<dyn Artifact>,

    config: InstanceConfig,

    memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,

    tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,

    globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,

    passive_elements: RefCell<BTreeMap<ElemIndex, Box<[VMFuncRef]>>>,

    passive_data: RefCell<BTreeMap<DataIndex, Arc<[u8]>>>,

    funcrefs: BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc>,

    host_state: Box<dyn Any>,

    imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,

    vmctx: VMContext,
}

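/// The host environment attached to an imported function, together with the
/// callbacks that manage its lifecycle: `clone` for duplicating the `env`
/// pointer, an optional one-shot `initializer`, and a `destructor` run on
/// drop.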
#[derive(Debug)]
pub enum ImportFunctionEnv {
    NoEnv,
    Env {
        env: *mut ffi::c_void,

        clone: fn(*mut ffi::c_void) -> *mut ffi::c_void,
        initializer: Option<ImportInitializerFuncPtr>,
        destructor: unsafe fn(*mut ffi::c_void),
    },
}

impl Clone for ImportFunctionEnv {
    fn clone(&self) -> Self {
        match &self {
            Self::NoEnv => Self::NoEnv,
            Self::Env { env, clone, destructor, initializer } => {
                let new_env = (*clone)(*env);
                Self::Env {
                    env: new_env,
                    clone: *clone,
                    destructor: *destructor,
                    initializer: *initializer,
                }
            }
        }
    }
}

impl Drop for ImportFunctionEnv {
    fn drop(&mut self) {
        match self {
            Self::Env { env, destructor, .. } => {
                unsafe {
                    (destructor)(*env);
                }
            }
            Self::NoEnv => (),
        }
    }
}

impl fmt::Debug for Instance {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Instance").finish()
    }
}

#[allow(clippy::cast_ptr_alignment)]
impl Instance {
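    /// Helper to reach locations inside the `VMContext` data: returns
    /// `vmctx_ptr() + offset`, cast to `*mut T`. The caller must pass an
    /// offset obtained from `self.offsets()`.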
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8).add(usize::try_from(offset).unwrap()).cast()
    }

    fn offsets(&self) -> &VMOffsets {
        self.artifact.offsets()
    }

    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_signature_ids_begin()) }
    }

    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_functions_begin()) }
    }

    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_tables_begin()) }
    }

    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        let addr = unsafe { self.imported_memories_ptr().add(index) };
        let align = std::mem::align_of::<VMMemoryImport>();
        debug_assert!(
            addr as usize % align == 0,
            "VMMemoryImport addr is not aligned to {align}: {addr:p}"
        );
        unsafe { &*addr }
    }

    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_memories_begin()) }
    }

    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_globals_begin()) }
    }

    #[allow(unused)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    #[allow(unused)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_tables_begin()) }
    }

    fn memory_definition(&self, index: MemoryIndex) -> &VMMemoryDefinition {
        match self.artifact.import_counts().local_memory_index(index) {
            Ok(local) => unsafe { self.memory_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_memory(import).from.vmmemory().as_ref() },
        }
    }

    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_memories_begin()) }
    }

    fn global(&self, index: GlobalIndex) -> &VMGlobalDefinition {
        match self.artifact.import_counts().local_global_index(index) {
            Ok(local) => unsafe { self.global_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_global(import).definition.as_ref() },
        }
    }

    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_globals_begin()) }
    }

    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_builtin_functions_begin()) }
    }

    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }

    fn trap_catcher_ptr(&self) -> *mut *const u8 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_trap_handler()) }
    }

    pub fn gas_counter_ptr(&self) -> *mut *const FastGasCounter {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_gas_limiter_pointer()) }
    }

    pub fn stack_limit_initial_ptr(&self) -> *mut u32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_initial_begin()) }
    }

    pub fn stack_limit_ptr(&self) -> *mut u32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_begin()) }
    }

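    /// Invoke the WebAssembly start function of the instance, if one is
    /// present, catching any trap it raises.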
    fn invoke_start_function(&self) -> Result<(), Trap> {
        let start_index = match self.artifact.start_function() {
            Some(idx) => idx,
            None => return Ok(()),
        };
        let start_funcref = self.funcrefs[start_index];
        self.reset_stack_meter();
        unsafe {
            catch_traps(|| {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionEnvironment)>(
                    start_funcref.func_ptr,
                )(start_funcref.vmctx)
            })
        }
    }

    pub fn reset_stack_meter(&self) {
        unsafe {
            *(self.stack_limit_ptr()) = *(self.stack_limit_initial_ptr());
        }
    }

    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }

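    /// Grow the local memory at `memory_index` by `delta` pages, delegating
    /// to the underlying `Memory::grow`. Panics if the index has no memory.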
    pub(crate) fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.grow(delta.into())
    }

    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        import.from.grow(delta.into())
    }

    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        self.memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
            .size()
    }

    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        self.imported_memory(memory_index).from.size()
    }

    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        self.tables[table_index].size()
    }

    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        self.imported_table(table_index).from.size()
    }

    pub(crate) fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .grow(delta, init_value)
    }

    pub(crate) unsafe fn imported_table_grow(
        &self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        import.from.grow(delta, init_value)
    }

    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .get(index)
    }

    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        import.from.get(index)
    }

    pub(crate) fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .set(index, val)
    }

    pub(crate) unsafe fn imported_table_set(
        &self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        import.from.set(index, val)
    }

    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
        Some(self.get_vm_funcref(function_index))
    }

    fn get_vm_funcref(&self, index: FunctionIndex) -> VMFuncRef {
        if index == FunctionIndex::reserved_value() {
            return VMFuncRef::null();
        }
        VMFuncRef(&self.funcrefs[index])
    }

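    /// The `table.init` operation: copy `len` elements of the passive
    /// element segment `elem_index`, starting at `src`, into the table at
    /// `dst`.
    ///
    /// Returns a `Trap` error if the source or destination range is out of
    /// bounds.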
    pub(crate) fn table_init(
        &self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements.get(&elem_index).map_or::<&[VMFuncRef], _>(&[], |e| &**e);

        if src.checked_add(len).map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

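    /// The `table.fill` operation: write `item` into `len` consecutive
    /// table slots starting at `start_index`, trapping if the range exceeds
    /// the table size.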
    pub(crate) fn table_fill(
        &self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index.checked_add(len).map_or(true, |n| n as usize > table_size) {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

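    /// The `elem.drop` operation: discard the passive element segment
    /// `elem_index`, so later `table.init` calls against it see an empty
    /// segment.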
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }

    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_copy(dst, src, len) }
    }

    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_copy(dst, src, len) }
    }

    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_fill(dst, val, len) }
    }

    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_fill(dst, val, len) }
    }

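    /// The `memory.init` operation: copy `len` bytes of the passive data
    /// segment `data_index`, starting at `src`, into the memory at `dst`.
    ///
    /// Returns a `Trap` error if the source or destination range is out of
    /// bounds.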
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory_definition(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let oob_access = src.checked_add(len).map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > memory.current_length);

        if oob_access {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe {
            let dst_start = memory.base.add(dst as usize);
            let dst_slice = slice::from_raw_parts_mut(dst_start, len as usize);
            dst_slice.copy_from_slice(src_slice);
        }
        Ok(())
    }

    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }

    pub(crate) fn get_table(&self, table_index: TableIndex) -> &dyn Table {
        match self.artifact.import_counts().local_table_index(table_index) {
            Ok(local) => self.get_local_table(local),
            Err(import) => self.get_foreign_table(import),
        }
    }

    pub(crate) fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.tables[index].as_ref()
    }

    pub(crate) fn get_foreign_table(&self, index: TableIndex) -> &dyn Table {
        let import = self.imported_table(index);
        &*import.from
    }
}

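/// A handle holding an `InstanceRef`, the owning reference to an allocated
/// `Instance`. Exported functions, memories, tables and globals handed out
/// through it keep the instance alive via strong references.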
#[derive(Debug, PartialEq)]
pub struct InstanceHandle {
    instance: InstanceRef,
}

impl InstanceHandle {
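    /// Create a new `InstanceHandle`, writing the instance into memory
    /// obtained from `allocator` and initializing the vmctx signatures,
    /// imports, globals and builtin functions with raw pointer writes.
    ///
    /// # Safety
    ///
    /// The caller must ensure that everything passed in (imports, memories,
    /// tables, globals, host state) is valid and matches the module
    /// described by `artifact`.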
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        artifact: Arc<dyn Artifact>,
        allocator: InstanceAllocator,
        finished_memories: BoxedSlice<LocalMemoryIndex, Arc<dyn Memory>>,
        finished_tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,
        imports: Imports,
        passive_data: BTreeMap<DataIndex, Arc<[u8]>>,
        host_state: Box<dyn Any>,
        imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,
        instance_config: InstanceConfig,
    ) -> Self {
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        let passive_data = RefCell::new(passive_data);

        let handle = {
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                artifact,
                config: instance_config.clone(),
                memories: finished_memories,
                tables: finished_tables,
                globals: finished_globals,
                passive_elements: Default::default(),
                passive_data,
                host_state,
                funcrefs,
                imported_function_envs,
                vmctx: VMContext {},
            };

            let mut instance_ref = allocator.write_instance(instance);

            {
                let instance = instance_ref.as_mut().unwrap();
                let vmctx_ptr = instance.vmctx_ptr();
                instance.funcrefs = build_funcrefs(
                    &imports,
                    instance.artifact.functions().iter().map(|(_, f)| f),
                    vmctx_ptr,
                );
                *(instance.trap_catcher_ptr()) = get_trap_handler();
                *(instance.gas_counter_ptr()) = instance_config.gas_counter;
                *(instance.stack_limit_ptr()) = instance_config.stack_limit;
                *(instance.stack_limit_initial_ptr()) = instance_config.stack_limit;
            }

            Self { instance: instance_ref }
        };
        let instance = handle.instance().as_ref();

        ptr::copy(
            instance.artifact.signatures().as_ptr(),
            instance.signature_ids_ptr() as *mut VMSharedSignatureIndex,
            instance.artifact.signatures().len(),
        );

        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr() as *mut VMFunctionImport,
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr() as *mut VMTableImport,
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr() as *mut VMMemoryImport,
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr() as *mut VMGlobalImport,
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr() as *mut VMBuiltinFunctionsArray,
            VMBuiltinFunctionsArray::initialized(),
        );

        initialize_passive_elements(instance);
        initialize_globals(instance);
        handle
    }

    pub fn instance(&self) -> &InstanceRef {
        &self.instance
    }

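    /// Finish instantiation: run the active element and data segment
    /// initializers, then the module's start function, if any.
    ///
    /// # Safety
    ///
    /// Must only be called once per instance, after the vmctx has been
    /// fully initialized by `new`.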
    pub unsafe fn finish_instantiation(&self) -> Result<(), Trap> {
        let instance = self.instance().as_ref();

        initialize_tables(instance)?;
        initialize_memories(instance, instance.artifact.data_segments().iter().map(Into::into))?;

        instance.invoke_start_function()?;
        Ok(())
    }

    pub unsafe fn invoke_function(
        &self,
        vmctx: VMFunctionEnvironment,
        trampoline: VMTrampoline,
        callee: *const VMFunctionBody,
        values_vec: *mut u8,
    ) -> Result<(), Trap> {
        {
            let instance = self.instance().as_ref();
            instance.reset_stack_meter();
        }
        unc_vm_call_trampoline(vmctx, trampoline, callee, values_vec)
    }

    pub fn vmctx(&self) -> &VMContext {
        self.instance().as_ref().vmctx()
    }

    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().as_ref().vmctx_ptr()
    }

    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().as_ref().offsets()
    }

    pub fn function_by_index(&self, idx: FunctionIndex) -> Option<VMFunction> {
        let instance = self.instance.as_ref();

        let (address, signature, vmctx, call_trampoline) =
            match instance.artifact.import_counts().local_function_index(idx) {
                Ok(local) => {
                    let func = instance.artifact.functions().get(local)?;
                    (
                        *(func.body),
                        func.signature,
                        VMFunctionEnvironment { vmctx: instance.vmctx_ptr() },
                        Some(func.trampoline),
                    )
                }
                Err(import) => {
                    let import = instance.imported_function(import);
                    (*(import.body), import.signature, import.environment, import.trampoline)
                }
            };
        Some(VMFunction {
            kind: VMFunctionKind::Static,
            address,
            signature,
            vmctx,
            call_trampoline,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    fn memory_by_index(&self, index: MemoryIndex) -> Option<crate::VMMemory> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_memory_index(index) {
            Ok(local) => Arc::clone(&instance.memories[local]),
            Err(import) => Arc::clone(&instance.imported_memory(import).from),
        };
        Some(crate::VMMemory {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    fn table_by_index(&self, index: TableIndex) -> Option<crate::VMTable> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_table_index(index) {
            Ok(local) => Arc::clone(&instance.tables[local]),
            Err(import) => Arc::clone(&instance.imported_table(import).from),
        };
        Some(crate::VMTable {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    pub fn global_by_index(&self, index: GlobalIndex) -> Option<VMGlobal> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_global_index(index) {
            Ok(local) => Arc::clone(&instance.globals[local]),
            Err(import) => Arc::clone(&instance.imported_global(import).from),
        };
        Some(crate::VMGlobal {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    pub fn lookup(&self, field: &str) -> Option<VMExtern> {
        let instance = self.instance.as_ref();
        Some(match instance.artifact.export_field(field)? {
            ExportIndex::Function(idx) => VMExtern::Function(self.function_by_index(idx)?),
            ExportIndex::Table(idx) => VMExtern::Table(self.table_by_index(idx)?),
            ExportIndex::Global(idx) => VMExtern::Global(self.global_by_index(idx)?),
            ExportIndex::Memory(idx) => VMExtern::Memory(self.memory_by_index(idx)?),
        })
    }

    pub fn host_state(&self) -> &dyn Any {
        self.instance().as_ref().host_state()
    }

    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().as_ref().memory_index(memory)
    }

    pub fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance().as_ref().memory_grow(memory_index, delta)
    }

    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().as_ref().table_index(table)
    }

    pub fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance().as_ref().table_grow(table_index, delta, init_value)
    }

    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().as_ref().table_get(table_index, index)
    }

    pub fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance().as_ref().table_set(table_index, index, val)
    }

    pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.instance().as_ref().get_local_table(index)
    }
}

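/// Run the deferred `initializer` of every imported function environment,
/// passing it the `env` pointer and `instance_ptr`. The initializers are
/// collected first and the handle's lock is released before they run, so
/// they may call back into the instance.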
#[tracing::instrument(target = "unc_vm", level = "trace", skip_all)]
pub unsafe fn initialize_host_envs<Err: Sized>(
    handle: &std::sync::Mutex<InstanceHandle>,
    instance_ptr: *const ffi::c_void,
) -> Result<(), Err> {
    let initializers = {
        let mut instance_lock = handle.lock().unwrap();
        let instance_ref = instance_lock.instance.as_mut_unchecked();
        let mut initializers = vec![];
        for import_function_env in instance_ref.imported_function_envs.values_mut() {
            match import_function_env {
                ImportFunctionEnv::Env { env, ref mut initializer, .. } => {
                    if let Some(init) = initializer.take() {
                        initializers.push((init, *env));
                    }
                }
                ImportFunctionEnv::NoEnv => (),
            }
        }
        initializers
    };
    for (init, env) in initializers {
        let f = mem::transmute::<&ImportInitializerFuncPtr, &ImportInitializerFuncPtr<Err>>(&init);
        f(env, instance_ptr)?;
    }
    Ok(())
}

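/// Compute the effective start offset of a data initializer: its constant
/// offset plus, if present, the value of its base global.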
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;
    if let Some(base) = init.location.base {
        let val = instance.global(base).to_u32();
        start += usize::try_from(val).unwrap();
    }
    start
}

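/// Return a byte-slice view of the full contents of the memory targeted by
/// `init`. Unsafe: the caller must guarantee exclusive access to the memory
/// while the slice is alive.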
#[allow(clippy::mut_from_ref)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = instance.memory_definition(init.location.memory_index);
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}

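/// Compute the effective start offset of a table element initializer: its
/// constant offset plus, if present, the value of its base global.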
fn get_table_init_start(init: &OwnedTableInitializer, instance: &Instance) -> usize {
    let mut start = init.offset;
    if let Some(base) = init.base {
        let val = instance.global(base).to_u32();
        start += usize::try_from(val).unwrap();
    }
    start
}

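/// Apply the module's active element segments to the instance's tables,
/// trapping if any segment falls outside its target table.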
fn initialize_tables(instance: &Instance) -> Result<(), Trap> {
    for init in instance.artifact.element_segments() {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table(init.table_index);

        if start.checked_add(init.elements.len()).map_or(true, |end| end > table.size() as usize) {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (i, func_idx) in init.elements.iter().enumerate() {
            let anyfunc = instance.get_vm_funcref(*func_idx);
            table.set(u32::try_from(start + i).unwrap(), TableElement::FuncRef(anyfunc)).unwrap();
        }
    }

    Ok(())
}

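/// Populate `Instance::passive_elements` by resolving the artifact's
/// passive element segments into funcrefs. Empty segments are skipped, so
/// lookups fall back to an empty slice.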
fn initialize_passive_elements(instance: &Instance) {
    let mut passive_elements = instance.passive_elements.borrow_mut();
    debug_assert!(
        passive_elements.is_empty(),
        "should only be called once, at initialization time"
    );

    passive_elements.extend(
        instance
            .artifact
            .passive_elements()
            .iter()
            .filter(|(_, segments)| !segments.is_empty())
            .map(|(idx, segments)| {
                (*idx, segments.iter().map(|s| instance.get_vm_funcref(*s)).collect())
            }),
    );
}

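/// Apply the module's active data segments to the instance's memories,
/// trapping if any segment falls outside its target memory.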
fn initialize_memories<'a>(
    instance: &Instance,
    data_initializers: impl Iterator<Item = DataInitializer<'a>>,
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.memory_definition(init.location.memory_index);

        let start = get_memory_init_start(&init, instance);
        if start.checked_add(init.data.len()).map_or(true, |end| end > memory.current_length) {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        unsafe {
            let mem_slice = get_memory_slice(&init, instance);
            let end = start + init.data.len();
            let to_init = &mut mem_slice[start..end];
            to_init.copy_from_slice(init.data);
        }
    }

    Ok(())
}

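/// Evaluate each global initializer expression and write the result into
/// the instance's global storage.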
fn initialize_globals(instance: &Instance) {
    for (index, (_, initializer)) in instance.artifact.globals().iter().enumerate() {
        unsafe {
            let to = instance.global_ptr(LocalGlobalIndex::new(index)).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = *x,
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = *x,
                GlobalInit::F32Const(x) => *(*to).as_f32_mut() = *x,
                GlobalInit::F64Const(x) => *(*to).as_f64_mut() = *x,
                GlobalInit::V128Const(x) => *(*to).as_bytes_mut() = *x.bytes(),
                GlobalInit::GetGlobal(x) => *to = instance.global(*x).clone(),
                GlobalInit::RefNullConst => *(*to).as_funcref_mut() = VMFuncRef::null(),
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    *(*to).as_funcref_mut() = funcref;
                }
            }
        }
    }
}

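/// Build the funcref (`VMCallerCheckedAnyfunc`) table covering the whole
/// `FunctionIndex` space: imported functions first, in import order, then
/// local functions, so entries line up with the module's function index
/// space.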
pub fn build_funcrefs<'a>(
    imports: &Imports,
    finished_functions: impl ExactSizeIterator<Item = &'a VMLocalFunction>,
    vmctx_ptr: *mut VMContext,
) -> BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc> {
    let mut func_refs =
        PrimaryMap::with_capacity(imports.functions.len() + finished_functions.len());
    for (_, import) in imports.functions.iter() {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(import.body),
            type_index: import.signature,
            vmctx: import.environment,
        };
        func_refs.push(anyfunc);
    }
    for function in finished_functions {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(function.body),
            type_index: function.signature,
            vmctx: VMFunctionEnvironment { vmctx: vmctx_ptr },
        };
        func_refs.push(anyfunc);
    }
    func_refs.into_boxed_slice()
}