mod allocator;
mod r#ref;

pub use allocator::InstanceAllocator;
pub use r#ref::{InstanceRef, WeakOrStrongInstanceRef};

use crate::func_data_registry::VMFuncRef;
use crate::global::Global;
use crate::imports::Imports;
use crate::memory::{LinearMemory, MemoryError};
use crate::sig_registry::VMSharedSignatureIndex;
use crate::table::{Table, TableElement};
use crate::trap::traphandlers::get_trap_handler;
use crate::trap::{Trap, TrapCode, catch_traps};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody,
    VMFunctionEnvironment, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
    VMLocalFunction, VMMemoryDefinition, VMMemoryImport, VMTableDefinition, VMTableImport,
};
use crate::{Artifact, VMOffsets, VMTrampoline, near_vm_call_trampoline};
use crate::{VMExtern, VMFunction, VMGlobal};
use memoffset::offset_of;
use more_asserts::assert_lt;
use near_vm_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
use near_vm_types::{
    DataIndex, DataInitializer, ElemIndex, ExportIndex, FastGasCounter, FunctionIndex, GlobalIndex,
    GlobalInit, InstanceConfig, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex,
    OwnedTableInitializer, Pages, TableIndex,
};
use std::any::Any;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::ffi;
use std::fmt;
use std::mem;
use std::ptr::{self, NonNull};
use std::slice;
use std::sync::Arc;

/// An initializer for an imported function's host environment: it receives the
/// raw host-env pointer and a raw pointer to the instance, and may fail with a
/// host-defined error type.
pub type ImportInitializerFuncPtr<ResultErr = *mut ffi::c_void> =
    fn(*mut ffi::c_void, *const ffi::c_void) -> Result<(), ResultErr>;

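// A minimal sketch (illustrative only, not used by this module) of a function
// matching `ImportInitializerFuncPtr`; the env type `MyEnv` is hypothetical:
//
//     fn init_env(env: *mut ffi::c_void, _instance: *const ffi::c_void)
//         -> Result<(), *mut ffi::c_void> {
//         // Safety: the caller guarantees `env` points to a live `MyEnv`.
//         let _env = unsafe { &mut *(env as *mut MyEnv) };
//         // ... finish initializing the host env here ...
//         Ok(())
//     }
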
/// A WebAssembly instance.
///
/// This type is dynamically-sized: the `vmctx` field is followed by
/// instance-specific data in the same allocation. The type has a C
/// representation so that the `vmctx` field is guaranteed to be last; see the
/// documentation of that field to learn more.
#[repr(C)]
pub struct Instance {
    /// The `Artifact` this `Instance` was instantiated from.
    pub(crate) artifact: Arc<dyn Artifact>,

    /// The configuration (gas counter, stack limits) for this instance.
    config: InstanceConfig,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, Arc<LinearMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<BTreeMap<ElemIndex, Box<[VMFuncRef]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed.
    passive_data: RefCell<BTreeMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data.
    /// `VMFuncRef`s hand out pointers into this slice.
    funcrefs: BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc>,

    /// Hosts can store arbitrary per-instance information here.
    host_state: Box<dyn Any>,

    /// Handles to the host environments of the imported functions.
    imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,

    /// Additional context used by compiled WebAssembly code. This field must
    /// be last: the `VMContext` data is laid out immediately after the
    /// `Instance` in the same allocation, and compiled code addresses it
    /// through the offsets in `VMOffsets`.
    vmctx: VMContext,
}

/// The host environment attached to an imported function.
#[derive(Debug)]
pub enum ImportFunctionEnv {
    /// The `vmctx` pointer does not refer to a host env, so there is no
    /// metadata about it.
    NoEnv,
    /// A user-defined host env, managed through raw pointers.
    Env {
        /// The host env itself.
        env: *mut ffi::c_void,

        /// A function that duplicates the env, returning a new raw pointer
        /// that will later be released by `destructor`.
        clone: fn(*mut ffi::c_void) -> *mut ffi::c_void,
        /// The initializer, if it has not yet run. It must be set to `None`
        /// after use to prevent double initialization.
        initializer: Option<ImportInitializerFuncPtr>,
        /// The destructor that frees the host env when this value is dropped.
        destructor: unsafe fn(*mut ffi::c_void),
    },
}
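
// A minimal sketch (illustrative only) of building an `Env` variant around a
// hypothetical `MyEnv: Clone + Default` host type:
//
//     let env = Box::into_raw(Box::new(MyEnv::default())) as *mut ffi::c_void;
//     let import_env = ImportFunctionEnv::Env {
//         env,
//         clone: |ptr| {
//             // Duplicate the boxed env; the new pointer is later freed by
//             // `destructor`.
//             let env = unsafe { &*(ptr as *const MyEnv) };
//             Box::into_raw(Box::new(env.clone())) as *mut ffi::c_void
//         },
//         initializer: None,
//         destructor: |ptr| unsafe { drop(Box::from_raw(ptr as *mut MyEnv)) },
//     };
//
// `Clone for ImportFunctionEnv` (below) duplicates the env via `clone`, and
// `Drop` releases each duplicate exactly once via `destructor`.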

impl Clone for ImportFunctionEnv {
    fn clone(&self) -> Self {
        match &self {
            Self::NoEnv => Self::NoEnv,
            Self::Env { env, clone, destructor, initializer } => {
                let new_env = (*clone)(*env);
                Self::Env {
                    env: new_env,
                    clone: *clone,
                    destructor: *destructor,
                    initializer: *initializer,
                }
            }
        }
    }
}

impl Drop for ImportFunctionEnv {
    fn drop(&mut self) {
        match self {
            Self::Env { env, destructor, .. } => {
                // # Safety
                // - This is correct because the destructor is only ever called
                //   once, when the env itself is dropped.
                unsafe {
                    (destructor)(*env);
                }
            }
            Self::NoEnv => (),
        }
    }
}

impl fmt::Debug for Instance {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Instance").finish()
    }
}

#[allow(clippy::cast_ptr_alignment)]
impl Instance {
    /// Helper function to access various locations offset from our
    /// `*mut VMContext` object.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe { (self.vmctx_ptr() as *mut u8).add(usize::try_from(offset).unwrap()).cast() }
    }

    /// The offsets used to address fields behind the `vmctx` pointer.
    fn offsets(&self) -> &VMOffsets {
        self.artifact.offsets()
    }

    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_signature_ids_begin()) }
    }

    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_functions_begin()) }
    }

    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_tables_begin()) }
    }

    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        let addr = unsafe { self.imported_memories_ptr().add(index) };
        let align = std::mem::align_of::<VMMemoryImport>();
        debug_assert!(
            addr as usize % align == 0,
            "VMMemoryImport addr is not aligned to {align}: {addr:p}"
        );
        unsafe { &*addr }
    }

    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_memories_begin()) }
    }

    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_imported_globals_begin()) }
    }

    /// Return the indexed `VMTableDefinition`.
    #[allow(unused)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Updates the value for a defined table to `VMTableDefinition`.
    #[allow(unused)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Return a pointer to the indexed `VMTableDefinition`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_tables_begin()) }
    }

    /// Get the `VMMemoryDefinition` for a local or imported memory.
    fn memory_definition(&self, index: MemoryIndex) -> &VMMemoryDefinition {
        match self.artifact.import_counts().local_memory_index(index) {
            Ok(local) => unsafe { self.memory_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_memory(import).from.vmmemory().as_ref() },
        }
    }

    /// Updates the value for a defined memory to `VMMemoryDefinition`.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Return a pointer to the indexed `VMMemoryDefinition`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_memories_begin()) }
    }

    /// Get the `VMGlobalDefinition` for a local or imported global.
    fn global(&self, index: GlobalIndex) -> &VMGlobalDefinition {
        match self.artifact.import_counts().local_global_index(index) {
            Ok(local) => unsafe { self.global_ptr(local).as_ref() },
            Err(import) => unsafe { self.imported_global(import).definition.as_ref() },
        }
    }

    /// Updates the value for a defined global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Return a pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the `VMGlobalDefinition`s.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_globals_begin()) }
    }

    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_builtin_functions_begin()) }
    }

    /// Return a reference to the `VMContext` used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Return a raw pointer to the `VMContext` used by compiled wasm code.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

    /// Return a reference to the custom state attached to this instance.
    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }

    /// Return a pointer to the trap-handler slot in the `VMContext`.
    fn trap_catcher_ptr(&self) -> *mut *const u8 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_trap_handler()) }
    }

    /// Return a pointer to the gas-counter pointer stored in the `VMContext`.
    pub fn gas_counter_ptr(&self) -> *mut *const FastGasCounter {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_gas_limiter_pointer()) }
    }

    /// Return a pointer to the initial stack limit stored in the `VMContext`.
    pub fn stack_limit_initial_ptr(&self) -> *mut u32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_initial_begin()) }
    }

    /// Return a pointer to the current stack limit stored in the `VMContext`.
    pub fn stack_limit_ptr(&self) -> *mut u32 {
        unsafe { self.vmctx_plus_offset(self.offsets().vmctx_stack_limit_begin()) }
    }

    /// Invoke the WebAssembly start function of the instance, if one is
    /// present.
    fn invoke_start_function(&self) -> Result<(), Trap> {
        let start_index = match self.artifact.start_function() {
            Some(idx) => idx,
            None => return Ok(()),
        };
        let start_funcref = self.funcrefs[start_index];
        self.reset_stack_meter();
        let result = unsafe {
            catch_traps(|| {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionEnvironment)>(
                    start_funcref.func_ptr,
                )(start_funcref.vmctx)
            })
        };
        result
    }

    /// Reset the remaining-stack counter to its configured initial value.
    pub fn reset_stack_meter(&self) {
        unsafe {
            *(self.stack_limit_ptr()) = *(self.stack_limit_initial_ptr());
        }
    }

    /// Return the offset of the `vmctx` field within `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    /// Return the table index for the given `VMTableDefinition`.
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Return the memory index for the given `VMMemoryDefinition`.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }

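    /// Grow the memory at `memory_index` by `delta` pages.
    ///
    /// Returns an error if the memory can't be grown by the specified amount
    /// of pages.
    ///
    /// A usage sketch (illustrative; assumes the instance defines at least one
    /// local memory):
    ///
    /// ```ignore
    /// // Grow the first local memory by one wasm page (64 KiB).
    /// let pages = instance.memory_grow(LocalMemoryIndex::new(0), Pages(1))?;
    /// ```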
    pub(crate) fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.grow(delta.into())
    }

    /// Grow an imported memory by the specified amount of pages.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to a valid, imported wasm memory.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        import.from.grow(delta.into())
    }

    /// Returns the number of allocated wasm pages.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        self.memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()))
            .size()
    }

    /// Returns the number of allocated wasm pages in an imported memory.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to a valid, imported wasm memory.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        self.imported_memory(memory_index).from.size()
    }

    /// Returns the number of elements in a given table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        self.tables[table_index].size()
    }

    /// Returns the number of elements in a given imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to a valid, imported wasm table.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        self.imported_table(table_index).from.size()
    }

    /// Grow a table by the specified amount of elements, filling the new slots
    /// with `init_value`.
    ///
    /// Returns `None` if the table can't be grown by the specified amount of
    /// elements.
    pub(crate) fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let result = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .grow(delta, init_value);

        result
    }

    /// Grow an imported table by the specified amount of elements.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to a valid, imported wasm table.
    pub(crate) unsafe fn imported_table_grow(
        &self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        import.from.grow(delta, init_value)
    }

    /// Get a table element by index.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .get(index)
    }

    /// Get an imported table element by index.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to a valid, imported wasm table.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        import.from.get(index)
    }

    /// Set a table element by index.
    pub(crate) fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .set(index, val)
    }

    /// Set an imported table element by index.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to a valid, imported wasm table.
    pub(crate) unsafe fn imported_table_set(
        &self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        import.from.set(index, val)
    }

    /// Get a `VMFuncRef` for the given `FunctionIndex`.
    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
        Some(self.get_vm_funcref(function_index))
    }

    fn get_vm_funcref(&self, index: FunctionIndex) -> VMFuncRef {
        if index == FunctionIndex::reserved_value() {
            return VMFuncRef::null();
        }
        VMFuncRef(&self.funcrefs[index])
    }

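    /// The `table.init` operation: initializes a portion of a table from a
    /// passive element segment.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element segment is out of bounds.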
    pub(crate) fn table_init(
        &self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements.get(&elem_index).map_or::<&[VMFuncRef], _>(&[], |e| &**e);

        if src.checked_add(len).map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// The `table.fill` operation: fills a range in a table with the given
    /// value.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of
    /// bounds.
    pub(crate) fn table_fill(
        &self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index.checked_add(len).map_or(true, |n| n as usize > table_size) {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// The `elem.drop` operation: drops the given passive element segment so
    /// that its memory can be reclaimed.
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
        // Note that we don't check whether an element was actually removed:
        // dropping a non-existent segment is a no-op, not a trap.
    }

    /// The `memory.copy` operation for locally-defined memories.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_copy(dst, src, len) }
    }

    /// The `memory.copy` operation for imported memories.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_copy(dst, src, len) }
    }

    /// The `memory.fill` operation for locally-defined memories.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = unsafe { self.memory_ptr(memory_index).as_ref() };
        unsafe { memory.memory_fill(dst, val, len) }
    }

    /// The `memory.fill` operation for imported memories.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        unsafe { import.from.vmmemory().as_ref().memory_fill(dst, val, len) }
    }

    /// The `memory.init` operation: initializes a portion of a memory from a
    /// passive data segment.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the memory is out of
    /// bounds or the range within the passive data segment is out of bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory_definition(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let oob_access = src.checked_add(len).map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > memory.current_length);

        if oob_access {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe {
            let dst_start = memory.base.add(dst as usize);
            let dst_slice = slice::from_raw_parts_mut(dst_start, len as usize);
            dst_slice.copy_from_slice(src_slice);
        }
        Ok(())
    }

    /// The `data.drop` operation: drops the given passive data segment so that
    /// its memory can be reclaimed.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }

    /// Get a table by index, regardless of whether it is locally-defined or an
    /// imported, foreign table.
    pub(crate) fn get_table(&self, table_index: TableIndex) -> &dyn Table {
        match self.artifact.import_counts().local_table_index(table_index) {
            Ok(local) => self.get_local_table(local),
            Err(import) => self.get_foreign_table(import),
        }
    }

    /// Get a locally-defined table by index.
    pub(crate) fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.tables[index].as_ref()
    }

    /// Get an imported, foreign table by index.
    pub(crate) fn get_foreign_table(&self, index: TableIndex) -> &dyn Table {
        let import = self.imported_table(index);
        &*import.from
    }
}

/// A handle holding an `Instance` of a WebAssembly module.
#[derive(Debug, PartialEq)]
pub struct InstanceHandle {
    /// The `InstanceRef`, which owns the `Instance` and its `VMContext`.
    instance: InstanceRef,
}

impl InstanceHandle {
    /// Create a new `InstanceHandle` pointing at a new `Instance`.
    ///
    /// # Safety
    ///
    /// The inputs are not validated. The caller must ensure that the
    /// `allocator` was prepared for this exact `artifact`, and that all of the
    /// supplied memories, tables, globals and imports are valid and consistent
    /// with it.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        artifact: Arc<dyn Artifact>,
        allocator: InstanceAllocator,
        finished_memories: BoxedSlice<LocalMemoryIndex, Arc<LinearMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, Arc<dyn Table>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, Arc<Global>>,
        imports: Imports,
        passive_data: BTreeMap<DataIndex, Arc<[u8]>>,
        host_state: Box<dyn Any>,
        imported_function_envs: BoxedSlice<FunctionIndex, ImportFunctionEnv>,
        instance_config: InstanceConfig,
    ) -> Self {
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        let passive_data = RefCell::new(passive_data);

        let handle = {
            // `funcrefs` starts out empty and is filled in below, once the
            // `vmctx` pointer of the allocated instance is known.
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                artifact,
                config: instance_config.clone(),
                memories: finished_memories,
                tables: finished_tables,
                globals: finished_globals,
                passive_elements: Default::default(),
                passive_data,
                host_state,
                funcrefs,
                imported_function_envs,
                vmctx: VMContext {},
            };

            let mut instance_ref = allocator.write_instance(instance);

            {
                let instance = instance_ref.as_mut().unwrap();
                let vmctx_ptr = instance.vmctx_ptr();
                instance.funcrefs = build_funcrefs(
                    &imports,
                    instance.artifact.functions().iter().map(|(_, f)| f),
                    vmctx_ptr,
                );
                unsafe {
                    *(instance.trap_catcher_ptr()) = get_trap_handler();
                    *(instance.gas_counter_ptr()) = instance_config.gas_counter;
                    *(instance.stack_limit_ptr()) = instance_config.stack_limit;
                    *(instance.stack_limit_initial_ptr()) = instance_config.stack_limit;
                }
            }

            Self { instance: instance_ref }
        };
        let instance = handle.instance().as_ref();

        // Copy the signatures, imports and global pointers into the `VMContext`
        // region so that compiled code can reach them through `VMOffsets`.
        unsafe {
            ptr::copy(
                instance.artifact.signatures().as_ptr(),
                instance.signature_ids_ptr() as *mut VMSharedSignatureIndex,
                instance.artifact.signatures().len(),
            );

            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr() as *mut VMFunctionImport,
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr() as *mut VMTableImport,
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr() as *mut VMMemoryImport,
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr() as *mut VMGlobalImport,
                imports.globals.len(),
            );
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr() as *mut VMBuiltinFunctionsArray,
                VMBuiltinFunctionsArray::initialized(),
            );
        }

        // These initializations only need to happen once, at instantiation
        // time.
        initialize_passive_elements(instance);
        initialize_globals(instance);
        handle
    }

    /// Return a reference to the contained `InstanceRef`.
    pub fn instance(&self) -> &InstanceRef {
        &self.instance
    }

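    /// Finishes the instantiation process started by `InstanceHandle::new`:
    /// runs the table and memory initializers and invokes the start function,
    /// if any.
    ///
    /// A sketch of the intended call sequence (illustrative; the arguments to
    /// `new` are elided):
    ///
    /// ```ignore
    /// let handle = unsafe { InstanceHandle::new(/* ... */) };
    /// unsafe { handle.finish_instantiation()? };
    /// ```
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.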
    pub unsafe fn finish_instantiation(&self) -> Result<(), Trap> {
        let instance = self.instance().as_ref();

        initialize_tables(instance)?;
        initialize_memories(instance, instance.artifact.data_segments().iter().map(Into::into))?;

        instance.invoke_start_function()?;
        Ok(())
    }

    /// Invoke a function in this instance through its trampoline.
    ///
    /// # Safety
    ///
    /// `vmctx`, `trampoline`, `callee` and `values_vec` must all be valid for
    /// the function being called.
    pub unsafe fn invoke_function(
        &self,
        vmctx: VMFunctionEnvironment,
        trampoline: VMTrampoline,
        callee: *const VMFunctionBody,
        values_vec: *mut u8,
    ) -> Result<(), Trap> {
        {
            let instance = self.instance().as_ref();
            instance.reset_stack_meter();
        }
        unsafe { near_vm_call_trampoline(vmctx, trampoline, callee, values_vec) }
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().as_ref().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().as_ref().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` used to compute offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().as_ref().offsets()
    }

    /// Return the given function as a `VMFunction`, whether it is local or
    /// imported.
    pub fn function_by_index(&self, idx: FunctionIndex) -> Option<VMFunction> {
        let instance = self.instance.as_ref();

        let (address, signature, vmctx, call_trampoline) =
            match instance.artifact.import_counts().local_function_index(idx) {
                Ok(local) => {
                    let func = instance.artifact.functions().get(local)?;
                    (
                        *(func.body),
                        func.signature,
                        VMFunctionEnvironment { vmctx: instance.vmctx_ptr() },
                        Some(func.trampoline),
                    )
                }
                Err(import) => {
                    let import = instance.imported_function(import);
                    (*(import.body), import.signature, import.environment, import.trampoline)
                }
            };
        Some(VMFunction {
            kind: VMFunctionKind::Static,
            address,
            signature,
            vmctx,
            call_trampoline,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Return the given memory as a `VMMemory`, whether it is local or
    /// imported.
    fn memory_by_index(&self, index: MemoryIndex) -> Option<crate::VMMemory> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_memory_index(index) {
            Ok(local) => Arc::clone(&instance.memories[local]),
            Err(import) => Arc::clone(&instance.imported_memory(import).from),
        };
        Some(crate::VMMemory::new(
            from,
            Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        ))
    }

    /// Return the given table as a `VMTable`, whether it is local or imported.
    fn table_by_index(&self, index: TableIndex) -> Option<crate::VMTable> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_table_index(index) {
            Ok(local) => Arc::clone(&instance.tables[local]),
            Err(import) => Arc::clone(&instance.imported_table(import).from),
        };
        Some(crate::VMTable {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

    /// Return the given global as a `VMGlobal`, whether it is local or
    /// imported.
    pub fn global_by_index(&self, index: GlobalIndex) -> Option<VMGlobal> {
        let instance = self.instance.as_ref();
        let from = match instance.artifact.import_counts().local_global_index(index) {
            Ok(local) => Arc::clone(&instance.globals[local]),
            Err(import) => Arc::clone(&instance.imported_global(import).from),
        };
        Some(crate::VMGlobal {
            from,
            instance_ref: Some(WeakOrStrongInstanceRef::Strong(self.instance().clone())),
        })
    }

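    /// Lookup an exported item (function, table, global or memory) by name.
    ///
    /// A usage sketch (illustrative; assumes the module exports a memory named
    /// "memory"):
    ///
    /// ```ignore
    /// if let Some(VMExtern::Memory(memory)) = handle.lookup("memory") {
    ///     // use `memory` here
    /// }
    /// ```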
    pub fn lookup(&self, field: &str) -> Option<VMExtern> {
        let instance = self.instance.as_ref();
        Some(match instance.artifact.export_field(field)? {
            ExportIndex::Function(idx) => VMExtern::Function(self.function_by_index(idx)?),
            ExportIndex::Table(idx) => VMExtern::Table(self.table_by_index(idx)?),
            ExportIndex::Global(idx) => VMExtern::Global(self.global_by_index(idx)?),
            ExportIndex::Memory(idx) => VMExtern::Memory(self.memory_by_index(idx)?),
        })
    }

    /// Return a reference to the custom state attached to this instance.
    pub fn host_state(&self) -> &dyn Any {
        self.instance().as_ref().host_state()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this
    /// instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().as_ref().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns an error if the memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance().as_ref().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this
    /// instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().as_ref().table_index(table)
    }

    /// Grow a table in this instance by the specified amount of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified amount of
    /// elements.
    pub fn table_grow(
        &self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance().as_ref().table_grow(table_index, delta, init_value)
    }

    /// Get a table element by index.
    ///
    /// Returns `None` if the index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().as_ref().table_get(table_index, index)
    }

    /// Set a table element by index.
    pub fn table_set(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance().as_ref().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this instance.
    pub fn get_local_table(&self, index: LocalTableIndex) -> &dyn Table {
        self.instance().as_ref().get_local_table(index)
    }
}

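/// Initialize the host environments of this instance's imported functions,
/// running each pending `ImportInitializerFuncPtr` exactly once.
///
/// # Safety
///
/// - This function must be called with the correct `Err` type parameter: the
///   error type actually returned by the initializer functions.
/// - `instance_ptr` must point to a valid instance object, as expected by the
///   initializer functions.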
#[tracing::instrument(target = "near_vm", level = "trace", skip_all)]
pub unsafe fn initialize_host_envs<Err: Sized>(
    handle: &parking_lot::Mutex<InstanceHandle>,
    instance_ptr: *const ffi::c_void,
) -> Result<(), Err> {
    let initializers = {
        let mut instance_lock = handle.lock();
        let instance_ref = unsafe { instance_lock.instance.as_mut_unchecked() };
        let mut initializers = vec![];
        for import_function_env in instance_ref.imported_function_envs.values_mut() {
            match import_function_env {
                ImportFunctionEnv::Env { env, initializer, .. } => {
                    // `take` the initializer so that it can never run twice.
                    if let Some(init) = initializer.take() {
                        initializers.push((init, *env));
                    }
                }
                ImportFunctionEnv::NoEnv => (),
            }
        }
        initializers
    };
    // Run the initializers after releasing the instance lock, reinterpreting
    // each function pointer with the caller-supplied error type.
    for (init, env) in initializers {
        let f = unsafe {
            mem::transmute::<&ImportInitializerFuncPtr, &ImportInitializerFuncPtr<Err>>(&init)
        };
        f(env, instance_ptr)?;
    }
    Ok(())
}
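
// A usage sketch (illustrative; `HostError` stands in for the embedder's error
// type, and `handle_mutex`/`instance_ptr` are assumed to come from the caller):
//
//     let result: Result<(), HostError> =
//         unsafe { initialize_host_envs::<HostError>(&handle_mutex, instance_ptr) };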

/// Compute the offset at which a memory data initializer starts, resolving a
/// global base if one is used.
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;
    if let Some(base) = init.location.base {
        let val = unsafe { instance.global(base).to_u32() };
        start += usize::try_from(val).unwrap();
    }
    start
}

/// Return a byte-slice view of a memory's data.
#[allow(clippy::mut_from_ref)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = instance.memory_definition(init.location.memory_index);
    unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) }
}

/// Compute the offset at which a table element initializer starts, resolving a
/// global base if one is used.
fn get_table_init_start(init: &OwnedTableInitializer, instance: &Instance) -> usize {
    let mut start = init.offset;
    if let Some(base) = init.base {
        let val = unsafe { instance.global(base).to_u32() };
        start += usize::try_from(val).unwrap();
    }
    start
}

/// Initialize the table memory from the provided initializers.
fn initialize_tables(instance: &Instance) -> Result<(), Trap> {
    for init in instance.artifact.element_segments() {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table(init.table_index);

        if start.checked_add(init.elements.len()).map_or(true, |end| end > table.size() as usize) {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (i, func_idx) in init.elements.iter().enumerate() {
            let anyfunc = instance.get_vm_funcref(*func_idx);
            table.set(u32::try_from(start + i).unwrap(), TableElement::FuncRef(anyfunc)).unwrap();
        }
    }

    Ok(())
}

/// Initialize the `Instance::passive_elements` map by resolving the passive
/// element segments' function indices into `VMFuncRef`s.
fn initialize_passive_elements(instance: &Instance) {
    let mut passive_elements = instance.passive_elements.borrow_mut();
    debug_assert!(
        passive_elements.is_empty(),
        "should only be called once, at initialization time"
    );

    passive_elements.extend(
        instance
            .artifact
            .passive_elements()
            .iter()
            .filter(|(_, segments)| !segments.is_empty())
            .map(|(idx, segments)| {
                (*idx, segments.iter().map(|s| instance.get_vm_funcref(*s)).collect())
            }),
    );
}

/// Initialize the instance's memories from the provided data initializers.
fn initialize_memories<'a>(
    instance: &Instance,
    data_initializers: impl Iterator<Item = DataInitializer<'a>>,
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.memory_definition(init.location.memory_index);

        let start = get_memory_init_start(&init, instance);
        if start.checked_add(init.data.len()).map_or(true, |end| end > memory.current_length) {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        unsafe {
            let mem_slice = get_memory_slice(&init, instance);
            let end = start + init.data.len();
            let to_init = &mut mem_slice[start..end];
            to_init.copy_from_slice(init.data);
        }
    }

    Ok(())
}

/// Initialize the instance's globals from the constant expressions in the
/// module's global initializers.
fn initialize_globals(instance: &Instance) {
    for (index, (_, initializer)) in instance.artifact.globals().iter().enumerate() {
        unsafe {
            let to = instance.global_ptr(LocalGlobalIndex::new(index)).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = *x,
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = *x,
                GlobalInit::F32Const(x) => *(*to).as_f32_mut() = *x,
                GlobalInit::F64Const(x) => *(*to).as_f64_mut() = *x,
                GlobalInit::V128Const(x) => *(*to).as_bytes_mut() = *x.bytes(),
                GlobalInit::GetGlobal(x) => *to = instance.global(*x).clone(),
                GlobalInit::RefNullConst => *(*to).as_funcref_mut() = VMFuncRef::null(),
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    *(*to).as_funcref_mut() = funcref;
                }
            }
        }
    }
}

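/// Build the `VMCallerCheckedAnyfunc`s for the instance: imported functions
/// come first (each keeping the `vmctx` of its own environment), followed by
/// the local functions, which all share this instance's `vmctx_ptr`.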
pub fn build_funcrefs<'a>(
    imports: &Imports,
    finished_functions: impl ExactSizeIterator<Item = &'a VMLocalFunction>,
    vmctx_ptr: *mut VMContext,
) -> BoxedSlice<FunctionIndex, VMCallerCheckedAnyfunc> {
    let mut func_refs =
        PrimaryMap::with_capacity(imports.functions.len() + finished_functions.len());
    // Add the imported functions first, preserving their own environments.
    for (_, import) in &imports.functions {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(import.body),
            type_index: import.signature,
            vmctx: import.environment,
        };
        func_refs.push(anyfunc);
    }
    // Then add the local functions, which are bound to this instance's vmctx.
    for function in finished_functions {
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: *(function.body),
            type_index: function.signature,
            vmctx: VMFunctionEnvironment { vmctx: vmctx_ptr },
        };
        func_refs.push(anyfunc);
    }
    func_refs.into_boxed_slice()
}