use crate::table::{Table, TableElementType};
use crate::vmcontext::VMFuncRef;
use crate::{Instance, TrapReason, VMGcRef};
#[cfg(feature = "wmemcheck")]
use anyhow::bail;
use anyhow::Result;
#[cfg(feature = "threads")]
use std::time::{Duration, Instant};
use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap, Unsigned};
#[cfg(feature = "wmemcheck")]
use wasmtime_wmemcheck::AccessError::{
    DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds,
};

pub mod raw {
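    //! Raw `extern "C"` entry points for the libcalls below, called directly
    //! from compiled wasm code. Each shim catches Rust panics and converts
    //! trapping `Err` results into a longjmp back out of wasm, then forwards
    //! to the safe implementation in the parent module.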
    #![allow(unused_doc_comments, unused_attributes)]

    use crate::{Instance, TrapReason, VMContext};

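    // Expands each builtin declared by `foreach_builtin_function!` (entries of
    // the shape `name(vmctx: vmctx, arg: i32, ...) -> result;`, e.g.
    // `memory32_grow(vmctx: vmctx, delta: i64, index: i32) -> pointer;`) into
    // a raw `unsafe extern "C"` shim. Shims whose feature gate is disabled
    // abort the process instead, and the `#[used]` static keeps each symbol
    // alive so it can't be stripped at link time.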
    macro_rules! libcall {
        (
            $(
                $( #[cfg($attr:meta)] )?
                $name:ident( vmctx: vmctx $(, $pname:ident: $param:ident )* ) $( -> $result:ident )?;
            )*
        ) => {
            $(
                #[allow(unused_variables, missing_docs)]
                pub unsafe extern "C" fn $name(
                    vmctx: *mut VMContext,
                    $( $pname : libcall!(@ty $param), )*
                ) $( -> libcall!(@ty $result))? {
                    $(#[cfg($attr)])?
                    {
                        let ret = crate::traphandlers::catch_unwind_and_longjmp(|| {
                            Instance::from_vmctx(vmctx, |instance| {
                                {
                                    super::$name(instance, $($pname),*)
                                }
                            })
                        });
                        LibcallResult::convert(ret)
                    }
                    $(
                        #[cfg(not($attr))]
                        std::process::abort();
                    )?
                }

                #[allow(non_upper_case_globals)]
                const _: () = {
                    #[used]
                    static I_AM_USED: unsafe extern "C" fn(
                        *mut VMContext,
                        $( $pname : libcall!(@ty $param), )*
                    ) $( -> libcall!(@ty $result))? = $name;
                };
            )*
        };

        (@ty i32) => (u32);
        (@ty i64) => (u64);
        (@ty reference) => (*mut u8);
        (@ty pointer) => (*mut u8);
        (@ty vmctx) => (*mut VMContext);
    }

    wasmtime_environ::foreach_builtin_function!(libcall);

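    /// Helper trait to convert a libcall's return value into its raw ABI
    /// representation, raising a trap for `Err` results.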
    trait LibcallResult {
        type Abi;
        unsafe fn convert(self) -> Self::Abi;
    }

    impl LibcallResult for () {
        type Abi = ();
        unsafe fn convert(self) {}
    }

    impl<T, E> LibcallResult for Result<T, E>
    where
        E: Into<TrapReason>,
    {
        type Abi = T;
        unsafe fn convert(self) -> T {
            match self {
                Ok(t) => t,
                Err(e) => crate::traphandlers::raise_trap(e.into()),
            }
        }
    }

    impl LibcallResult for *mut u8 {
        type Abi = *mut u8;
        unsafe fn convert(self) -> *mut u8 {
            self
        }
    }
}

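/// Implementation of wasm's `memory.grow` instruction. Returns the previous
/// size of the memory in wasm pages as a pointer-sized value, or `usize::MAX`
/// if growth failed.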
fn memory32_grow(
    instance: &mut Instance,
    delta: u64,
    memory_index: u32,
) -> Result<*mut u8, TrapReason> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let result =
        match instance
            .memory_grow(memory_index, delta)
            .map_err(|error| TrapReason::User {
                error,
                needs_backtrace: true,
            })? {
            Some(size_in_bytes) => size_in_bytes / (wasmtime_environ::WASM_PAGE_SIZE as usize),
            None => usize::MAX,
        };
    Ok(result as *mut _)
}

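/// Implementation of wasm's `table.grow` instruction, where `init_value` is
/// interpreted according to the table's element type. Returns the previous
/// size of the table, or `-1` (as `u32`) if growth failed.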
unsafe fn table_grow(
    instance: &mut Instance,
    table_index: u32,
    delta: u32,
    init_value: *mut u8,
) -> Result<u32> {
    let table_index = TableIndex::from_u32(table_index);

    let element = match instance.table_element_type(table_index) {
        TableElementType::Func => (init_value as *mut VMFuncRef).into(),
        TableElementType::GcRef => VMGcRef::from_r64(u64::try_from(init_value as usize).unwrap())
            .unwrap()
            .map(|r| (*instance.store()).gc_store().clone_gc_ref(&r))
            .into(),
    };

    Ok(match instance.table_grow(table_index, delta, element)? {
        Some(r) => r,
        None => (-1_i32).unsigned(),
    })
}

use table_grow as table_grow_func_ref;

#[cfg(feature = "gc")]
use table_grow as table_grow_gc_ref;

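/// Implementation of wasm's `table.fill` instruction, filling `len` elements
/// starting at `dst` with `val`.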
unsafe fn table_fill(
    instance: &mut Instance,
    table_index: u32,
    dst: u32,
    val: *mut u8,
    len: u32,
) -> Result<(), Trap> {
    let table_index = TableIndex::from_u32(table_index);
    let table = &mut *instance.get_table(table_index);
    match table.element_type() {
        TableElementType::Func => {
            let val = val.cast::<VMFuncRef>();
            table.fill((*instance.store()).gc_store(), dst, val.into(), len)
        }

        TableElementType::GcRef => {
            let gc_store = (*instance.store()).gc_store();
            let gc_ref = VMGcRef::from_r64(u64::try_from(val as usize).unwrap()).unwrap();
            let gc_ref = gc_ref.map(|r| gc_store.clone_gc_ref(&r));
            table.fill(gc_store, dst, gc_ref.into(), len)
        }
    }
}

use table_fill as table_fill_func_ref;

#[cfg(feature = "gc")]
use table_fill as table_fill_gc_ref;

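/// Implementation of wasm's `table.copy` instruction, copying `len` elements
/// from one table to another (possibly the same one).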
unsafe fn table_copy(
    instance: &mut Instance,
    dst_table_index: u32,
    src_table_index: u32,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let dst_table_index = TableIndex::from_u32(dst_table_index);
    let src_table_index = TableIndex::from_u32(src_table_index);
    let dst_table = instance.get_table(dst_table_index);
    let src_range = src..(src.checked_add(len).unwrap_or(u32::MAX));
    let src_table = instance.get_table_with_lazy_init(src_table_index, src_range);
    let gc_store = (*instance.store()).gc_store();
    Table::copy(gc_store, dst_table, src_table, dst, src, len)
}

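/// Implementation of wasm's `table.init` instruction, copying elements from a
/// passive element segment into a table.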
fn table_init(
    instance: &mut Instance,
    table_index: u32,
    elem_index: u32,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let table_index = TableIndex::from_u32(table_index);
    let elem_index = ElemIndex::from_u32(elem_index);
    instance.table_init(table_index, elem_index, dst, src, len)
}

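/// Implementation of wasm's `elem.drop` instruction.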
fn elem_drop(instance: &mut Instance, elem_index: u32) {
    let elem_index = ElemIndex::from_u32(elem_index);
    instance.elem_drop(elem_index)
}

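/// Implementation of wasm's `memory.copy` instruction.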
fn memory_copy(
    instance: &mut Instance,
    dst_index: u32,
    dst: u64,
    src_index: u32,
    src: u64,
    len: u64,
) -> Result<(), Trap> {
    let src_index = MemoryIndex::from_u32(src_index);
    let dst_index = MemoryIndex::from_u32(dst_index);
    instance.memory_copy(dst_index, dst, src_index, src, len)
}

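/// Implementation of wasm's `memory.fill` instruction, writing the low byte
/// of `val` to `len` bytes starting at `dst`.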
fn memory_fill(
    instance: &mut Instance,
    memory_index: u32,
    dst: u64,
    val: u32,
    len: u64,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    instance.memory_fill(memory_index, dst, val as u8, len)
}

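/// Implementation of wasm's `memory.init` instruction, copying bytes from a
/// passive data segment into linear memory.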
fn memory_init(
    instance: &mut Instance,
    memory_index: u32,
    data_index: u32,
    dst: u64,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let data_index = DataIndex::from_u32(data_index);
    instance.memory_init(memory_index, data_index, dst, src, len)
}

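/// Implementation of wasm's `ref.func` instruction, returning the `VMFuncRef`
/// for the given function index.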
fn ref_func(instance: &mut Instance, func_index: u32) -> *mut u8 {
    instance
        .get_func_ref(FuncIndex::from_u32(func_index))
        .expect("ref_func: funcref should always be available for given func index")
        .cast()
}

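/// Implementation of wasm's `data.drop` instruction.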
fn data_drop(instance: &mut Instance, data_index: u32) {
    let data_index = DataIndex::from_u32(data_index);
    instance.data_drop(data_index)
}

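/// Returns a `funcref` table element, lazily initializing it first if needed.
/// Bounds checking is expected to have happened in compiled code already.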
unsafe fn table_get_lazy_init_func_ref(
    instance: &mut Instance,
    table_index: u32,
    index: u32,
) -> *mut u8 {
    let table_index = TableIndex::from_u32(table_index);
    let table = instance.get_table_with_lazy_init(table_index, std::iter::once(index));
    let gc_store = (*instance.store()).gc_store();
    let elem = (*table)
        .get(gc_store, index)
        .expect("table access already bounds-checked");

    elem.into_func_ref_asserting_initialized().cast()
}

#[cfg(feature = "gc")]
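/// Drops a GC reference that compiled code is done with, releasing it from
/// the store's GC heap.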
unsafe fn drop_gc_ref(instance: &mut Instance, gc_ref: *mut u8) {
    let gc_ref = VMGcRef::from_r64(u64::try_from(gc_ref as usize).unwrap())
        .expect("valid r64")
        .expect("non-null VMGcRef");
    log::trace!("libcalls::drop_gc_ref({gc_ref:?})");
    (*instance.store()).gc_store().drop_gc_ref(gc_ref);
}

#[cfg(feature = "gc")]
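/// Triggers a garbage collection, keeping `gc_ref` (if non-null) rooted
/// across the collection and returning its possibly-updated reference.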
unsafe fn gc(instance: &mut Instance, gc_ref: *mut u8) -> Result<*mut u8> {
    let gc_ref = u64::try_from(gc_ref as usize).unwrap();
    let gc_ref = VMGcRef::from_r64(gc_ref).expect("valid r64");
    let gc_ref = gc_ref.map(|r| (*instance.store()).gc_store().clone_gc_ref(&r));

    if let Some(gc_ref) = &gc_ref {
        let gc_store = (*instance.store()).gc_store();
        let gc_ref = gc_store.clone_gc_ref(gc_ref);
        gc_store.expose_gc_ref_to_wasm(gc_ref);
    }

    match (*instance.store()).gc(gc_ref)? {
        None => Ok(std::ptr::null_mut()),
        Some(r) => {
            let r64 = r.as_r64();
            (*instance.store()).gc_store().expose_gc_ref_to_wasm(r);
            Ok(usize::try_from(r64).unwrap() as *mut u8)
        }
    }
}

#[cfg(feature = "gc")]
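/// Reads a GC-reference global, cloning the reference and exposing it to
/// wasm; may first trigger a GC if the store needs one before handing a new
/// reference out to wasm.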
unsafe fn gc_ref_global_get(instance: &mut Instance, index: u32) -> Result<*mut u8> {
    use std::num::NonZeroUsize;

    let index = wasmtime_environ::GlobalIndex::from_u32(index);
    let global = instance.defined_or_imported_global_ptr(index);
    let gc_store = (*instance.store()).gc_store();

    if gc_store
        .gc_heap
        .need_gc_before_entering_wasm(NonZeroUsize::new(1).unwrap())
    {
        (*instance.store()).gc(None)?;
    }

    match (*global).as_gc_ref() {
        None => Ok(std::ptr::null_mut()),
        Some(gc_ref) => {
            let gc_ref = gc_store.clone_gc_ref(gc_ref);
            let ret = usize::try_from(gc_ref.as_r64()).unwrap() as *mut u8;
            gc_store.expose_gc_ref_to_wasm(gc_ref);
            Ok(ret)
        }
    }
}

#[cfg(feature = "gc")]
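/// Writes a GC reference into a GC-reference global.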
unsafe fn gc_ref_global_set(instance: &mut Instance, index: u32, gc_ref: *mut u8) {
    let index = wasmtime_environ::GlobalIndex::from_u32(index);
    let global = instance.defined_or_imported_global_ptr(index);
    let gc_ref = VMGcRef::from_r64(u64::try_from(gc_ref as usize).unwrap()).expect("valid r64");
    let gc_store = (*instance.store()).gc_store();
    (*global).write_gc_ref(gc_store, gc_ref.as_ref());
}

#[cfg(feature = "threads")]
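/// Implementation of wasm's `memory.atomic.notify` instruction, returning the
/// number of waiters woken.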
fn memory_atomic_notify(
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    count: u32,
) -> Result<u32, Trap> {
    let memory = MemoryIndex::from_u32(memory_index);
    instance
        .get_runtime_memory(memory)
        .atomic_notify(addr_index, count)
}

#[cfg(feature = "threads")]
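/// Implementation of wasm's `memory.atomic.wait32` instruction; a negative
/// `timeout` (interpreted as `i64` nanoseconds) means wait indefinitely.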
fn memory_atomic_wait32(
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    expected: u32,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Instant::now() + Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait32(addr_index, expected, timeout)? as u32)
}

#[cfg(feature = "threads")]
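/// Implementation of wasm's `memory.atomic.wait64` instruction; timeout
/// semantics match `memory_atomic_wait32` above.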
fn memory_atomic_wait64(
    instance: &mut Instance,
    memory_index: u32,
    addr_index: u64,
    expected: u64,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Instant::now() + Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait64(addr_index, expected, timeout)? as u32)
}

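/// Invoked from compiled code when fuel runs out, deferring to the store's
/// configured out-of-fuel behavior.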
unsafe fn out_of_gas(instance: &mut Instance) -> Result<()> {
    (*instance.store()).out_of_gas()
}

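/// Invoked when the epoch deadline is reached, letting the store decide
/// whether to trap or continue and returning the new deadline.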
unsafe fn new_epoch(instance: &mut Instance) -> Result<u64> {
    (*instance.store()).new_epoch()
}

#[cfg(feature = "wmemcheck")]
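/// Hook for validating a guest `malloc` when wmemcheck is enabled, reporting
/// double or out-of-bounds allocations as errors.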
unsafe fn check_malloc(instance: &mut Instance, addr: u32, len: u32) -> Result<u32> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.malloc(addr as usize, len as usize);
        wmemcheck_state.memcheck_on();
        match result {
            Ok(()) => {
                return Ok(0);
            }
            Err(DoubleMalloc { addr, len }) => {
                bail!("Double malloc at addr {:#x} of size {}", addr, len)
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len);
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(0)
}

#[cfg(feature = "wmemcheck")]
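/// Hook for validating a guest `free` when wmemcheck is enabled.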
unsafe fn check_free(instance: &mut Instance, addr: u32) -> Result<u32> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.free(addr as usize);
        wmemcheck_state.memcheck_on();
        match result {
            Ok(()) => {
                return Ok(0);
            }
            Err(InvalidFree { addr }) => {
                bail!("Invalid free at addr {:#x}", addr)
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(0)
}

#[cfg(feature = "wmemcheck")]
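/// Hook for validating a guest load when wmemcheck is enabled.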
fn check_load(instance: &mut Instance, num_bytes: u32, addr: u32, offset: u32) -> Result<u32> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.read(addr as usize + offset as usize, num_bytes as usize);
        match result {
            Ok(()) => {
                return Ok(0);
            }
            Err(InvalidRead { addr, len }) => {
                bail!("Invalid load at addr {:#x} of size {}", addr, len);
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Load out of bounds at addr {:#x} of size {}", addr, len);
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(0)
}

#[cfg(feature = "wmemcheck")]
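/// Hook for validating a guest store when wmemcheck is enabled.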
fn check_store(instance: &mut Instance, num_bytes: u32, addr: u32, offset: u32) -> Result<u32> {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        let result = wmemcheck_state.write(addr as usize + offset as usize, num_bytes as usize);
        match result {
            Ok(()) => {
                return Ok(0);
            }
            Err(InvalidWrite { addr, len }) => {
                bail!("Invalid store at addr {:#x} of size {}", addr, len)
            }
            Err(OutOfBounds { addr, len }) => {
                bail!("Store out of bounds at addr {:#x} of size {}", addr, len)
            }
            _ => {
                panic!("unreachable")
            }
        }
    }
    Ok(0)
}

#[cfg(feature = "wmemcheck")]
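/// Turns memory checking off while inside the guest's `malloc`, so the
/// allocator's own accesses aren't flagged; `check_malloc` turns it back on.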
fn malloc_start(instance: &mut Instance) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        wmemcheck_state.memcheck_off();
    }
}

#[cfg(feature = "wmemcheck")]
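/// Turns memory checking off while inside the guest's `free`; `check_free`
/// turns it back on.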
fn free_start(instance: &mut Instance) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        wmemcheck_state.memcheck_off();
    }
}

#[cfg(feature = "wmemcheck")]
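/// Hook for guest stack-pointer updates; currently a no-op.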
fn update_stack_pointer(_instance: &mut Instance, _value: u32) {}

#[cfg(feature = "wmemcheck")]
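/// Updates wmemcheck's view of the memory's size after `memory.grow`, where
/// `num_pages` is in 64 KiB wasm pages.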
fn update_mem_size(instance: &mut Instance, num_pages: u32) {
    if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
        const KIB: usize = 1024;
        let num_bytes = num_pages as usize * 64 * KIB;
        wmemcheck_state.update_mem_size(num_bytes);
    }
}

#[allow(missing_docs)]
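/// Floating-point helper routines referenced directly from compiled code via
/// relocations, used when no suitable native instruction is available.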
pub mod relocs {
    pub extern "C" fn floorf32(f: f32) -> f32 {
        f.floor()
    }

    pub extern "C" fn floorf64(f: f64) -> f64 {
        f.floor()
    }

    pub extern "C" fn ceilf32(f: f32) -> f32 {
        f.ceil()
    }

    pub extern "C" fn ceilf64(f: f64) -> f64 {
        f.ceil()
    }

    pub extern "C" fn truncf32(f: f32) -> f32 {
        f.trunc()
    }

    pub extern "C" fn truncf64(f: f64) -> f64 {
        f.trunc()
    }

    const TOINT_32: f32 = 1.0 / f32::EPSILON;
    const TOINT_64: f64 = 1.0 / f64::EPSILON;

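    // Round-to-nearest, ties to even, without a dedicated instruction: adding
    // and then subtracting `TOINT` (2^23 for f32, 2^52 for f64) forces the
    // rounding to happen in the floating-point unit, in the same spirit as
    // musl's `rint`. Values whose exponent is already large enough to be
    // integral, as well as infinities and NaNs (which get quieted), are
    // handled by the first branch below.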
    pub extern "C" fn nearestf32(x: f32) -> f32 {
        let i = x.to_bits();
        let e = i >> 23 & 0xff;
        if e >= 0x7f_u32 + 23 {
            if e == 0xff {
                if i & 0x7fffff != 0 {
                    return f32::from_bits(i | (1 << 22));
                }
            }
            x
        } else {
            (x.abs() + TOINT_32 - TOINT_32).copysign(x)
        }
    }

    pub extern "C" fn nearestf64(x: f64) -> f64 {
        let i = x.to_bits();
        let e = i >> 52 & 0x7ff;
        if e >= 0x3ff_u64 + 52 {
            if e == 0x7ff {
                if i & 0xfffffffffffff != 0 {
                    return f64::from_bits(i | (1 << 51));
                }
            }
            x
        } else {
            (x.abs() + TOINT_64 - TOINT_64).copysign(x)
        }
    }

    pub extern "C" fn fmaf32(a: f32, b: f32, c: f32) -> f32 {
        a.mul_add(b, c)
    }

    pub extern "C" fn fmaf64(a: f64, b: f64, c: f64) -> f64 {
        a.mul_add(b, c)
    }

    #[cfg(target_arch = "x86_64")]
    use std::arch::x86_64::__m128i;
    #[cfg(target_arch = "x86_64")]
    #[allow(improper_ctypes_definitions)]
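    /// Emulation of the SSSE3 `pshufb` instruction: each output byte is
    /// selected from `a` by the low four bits of the corresponding byte of
    /// `b`, or zeroed if that byte's high bit is set.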
    pub extern "C" fn x86_pshufb(a: __m128i, b: __m128i) -> __m128i {
        union U {
            reg: __m128i,
            mem: [u8; 16],
        }

        unsafe {
            let a = U { reg: a }.mem;
            let b = U { reg: b }.mem;

            let select = |arr: &[u8; 16], byte: u8| {
                if byte & 0x80 != 0 {
                    0x00
                } else {
                    arr[(byte & 0xf) as usize]
                }
            };

            U {
                mem: [
                    select(&a, b[0]),
                    select(&a, b[1]),
                    select(&a, b[2]),
                    select(&a, b[3]),
                    select(&a, b[4]),
                    select(&a, b[5]),
                    select(&a, b[6]),
                    select(&a, b[7]),
                    select(&a, b[8]),
                    select(&a, b[9]),
                    select(&a, b[10]),
                    select(&a, b[11]),
                    select(&a, b[12]),
                    select(&a, b[13]),
                    select(&a, b[14]),
                    select(&a, b[15]),
                ],
            }
            .reg
        }
    }
}