#[repr(C)]
pub struct VMMemoryDefinition {
    pub base: *mut u8,
    pub current_length: AtomicUsize,
}

The fields that compiled code needs in order to access a WebAssembly linear memory defined within the instance, namely the start address and the size in bytes.

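To make the relationship concrete, here is a hedged sketch (not the code Wasmtime actually generates) of how a bounds-checked 4-byte load could combine the two fields; guarded_load_u32 is a hypothetical helper introduced only for illustration:

use core::sync::atomic::Ordering;

// Hypothetical sketch of the check that guards a 4-byte read at wasm address
// `addr`; real compiled code inlines an equivalent comparison.
unsafe fn guarded_load_u32(def: &VMMemoryDefinition, addr: u64) -> Option<u32> {
    // Snapshot of the memory's current size in bytes.
    let len = def.current_length.load(Ordering::Relaxed) as u64;
    // In bounds only if `addr + 4 <= len`, with overflow handled explicitly.
    let end = addr.checked_add(4)?;
    if end > len {
        return None;
    }
    // Translate the wasm address to a host pointer relative to `base`.
    Some(def.base.add(addr as usize).cast::<u32>().read_unaligned())
}
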
Fields§

§base: *mut u8

The start address.

§current_length: AtomicUsize

The current logical size of this linear memory in bytes.

This is atomic because shared memories must be able to grow their length atomically. For relaxed access, see VMMemoryDefinition::current_length().

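A minimal sketch, assuming only std, of the race the atomic type guards against: one thread publishing a larger length while another reads it concurrently. With a plain usize this would be a data race; with AtomicUsize the reader simply sees either the old or the new value:

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    // Stand-in for the `current_length` of a shared linear memory (one 64 KiB page).
    let len = Arc::new(AtomicUsize::new(64 * 1024));

    let grower = {
        let len = Arc::clone(&len);
        // A `memory.grow` on another thread publishes the new length atomically.
        thread::spawn(move || len.store(128 * 1024, Ordering::SeqCst))
    };

    // A concurrent reader may observe either size, but never a torn value.
    let observed = len.load(Ordering::Relaxed);
    assert!(observed == 64 * 1024 || observed == 128 * 1024);

    grower.join().unwrap();
}
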
Implementations§

impl VMMemoryDefinition

pub fn current_length(&self) -> usize

Return the current length of the VMMemoryDefinition by performing a relaxed load; do not use this function for situations in which a precise length is needed. Owned memories (i.e., non-shared) will always return a precise result (since no concurrent modification is possible), but shared memories may see an imprecise value: a current_length potentially smaller than what some other thread observes. Since Wasm memory only grows, this under-estimation may be acceptable in certain cases.

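As a sketch of why the under-estimate is tolerable for bounds checks (in_bounds below is a hypothetical helper, not part of this crate): a stale, smaller length can only reject an access conservatively; it can never admit an access that the true length would reject, because the true length is at least as large:

// Hypothetical helper: bounds check against a possibly stale length snapshot
// obtained from `VMMemoryDefinition::current_length()`.
fn in_bounds(stale_len: usize, addr: usize, access_size: usize) -> bool {
    // True for the stale (smaller) length implies true for the real length.
    addr.checked_add(access_size).map_or(false, |end| end <= stale_len)
}

fn main() {
    // The memory grew from 64 KiB to 128 KiB, but this thread still sees 64 KiB.
    let stale_len = 64 * 1024;
    assert!(in_bounds(stale_len, 0, 4)); // genuinely in bounds
    assert!(!in_bounds(stale_len, 64 * 1024, 4)); // rejected conservatively, not unsafely
}
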
Examples found in repository
src/vmcontext.rs (line 248)
    pub unsafe fn load(ptr: *mut Self) -> Self {
        let other = &*ptr;
        VMMemoryDefinition {
            base: other.base,
            current_length: other.current_length().into(),
        }
    }
More examples
src/memory.rs (line 894)
fn validate_atomic_addr(
    def: &VMMemoryDefinition,
    addr: u64,
    access_size: u64,
    access_alignment: u64,
) -> Result<*mut u8, Trap> {
    debug_assert!(access_alignment.is_power_of_two());
    if !(addr % access_alignment == 0) {
        return Err(Trap::HeapMisaligned);
    }

    let length = u64::try_from(def.current_length()).unwrap();
    if !(addr.saturating_add(access_size) < length) {
        return Err(Trap::MemoryOutOfBounds);
    }

    Ok(def.base.wrapping_add(addr as usize))
}
src/instance/allocator.rs (line 256)
fn check_memory_init_bounds(instance: &Instance, initializers: &[MemoryInitializer]) -> Result<()> {
    for init in initializers {
        let memory = instance.get_memory(init.memory_index);
        let start = get_memory_init_start(init, instance)?;
        let end = usize::try_from(start)
            .ok()
            .and_then(|start| start.checked_add(init.data.len()));

        match end {
            Some(end) if end <= memory.current_length() => {
                // Initializer is in bounds
            }
            _ => {
                bail!("memory out of bounds: data segment does not fit")
            }
        }
    }

    Ok(())
}

fn initialize_memories(instance: &mut Instance, module: &Module) -> Result<()> {
    let memory_size_in_pages =
        &|memory| (instance.get_memory(memory).current_length() as u64) / u64::from(WASM_PAGE_SIZE);

    // Loads the `global` value and returns it as a `u64`, but sign-extends
    // 32-bit globals which can be used as the base for 32-bit memories.
    let get_global_as_u64 = &|global| unsafe {
        let def = if let Some(def_index) = instance.module().defined_global_index(global) {
            instance.global(def_index)
        } else {
            &*instance.imported_global(global).from
        };
        if module.globals[global].wasm_ty == WasmType::I64 {
            *def.as_u64()
        } else {
            u64::from(*def.as_u32())
        }
    };

    // Delegates to the `init_memory` method which is sort of a duplicate of
    // `instance.memory_init_segment` but is used at compile-time in other
    // contexts so is shared here to have only one method of memory
    // initialization.
    //
    // This call to `init_memory` notably implements all the bells and whistles
    // so errors only happen if an out-of-bounds segment is found, in which case
    // a trap is returned.
    let ok = module.memory_initialization.init_memory(
        InitMemory::Runtime {
            memory_size_in_pages,
            get_global_as_u64,
        },
        &mut |memory_index, init| {
            // If this initializer applies to a defined memory but that memory
            // doesn't need initialization, due to something like copy-on-write
            // pre-initializing it via mmap magic, then this initializer can be
            // skipped entirely.
            if let Some(memory_index) = module.defined_memory_index(memory_index) {
                if !instance.memories[memory_index].needs_init() {
                    return true;
                }
            }
            let memory = instance.get_memory(memory_index);

            unsafe {
                let src = instance.wasm_data(init.data.clone());
                let dst = memory.base.add(usize::try_from(init.offset).unwrap());
                // FIXME audit whether this is safe in the presence of shared
                // memory
                // (https://github.com/bytecodealliance/wasmtime/issues/4203).
                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len())
            }
            true
        },
    );
    if !ok {
        return Err(Trap::MemoryOutOfBounds.into());
    }

    Ok(())
}
src/instance.rs (line 684)
    pub(crate) fn memory_copy(
        &mut self,
        dst_index: MemoryIndex,
        dst: u64,
        src_index: MemoryIndex,
        src: u64,
        len: u64,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let src_mem = self.get_memory(src_index);
        let dst_mem = self.get_memory(dst_index);

        let src = self.validate_inbounds(src_mem.current_length(), src, len)?;
        let dst = self.validate_inbounds(dst_mem.current_length(), dst, len)?;

        // Bounds and casts are checked above, by this point we know that
        // everything is safe.
        unsafe {
            let dst = dst_mem.base.add(dst);
            let src = src_mem.base.add(src);
            // FIXME audit whether this is safe in the presence of shared memory
            // (https://github.com/bytecodealliance/wasmtime/issues/4203).
            ptr::copy(src, dst, len as usize);
        }

        Ok(())
    }

    fn validate_inbounds(&self, max: usize, ptr: u64, len: u64) -> Result<usize, Trap> {
        let oob = || Trap::MemoryOutOfBounds;
        let end = ptr
            .checked_add(len)
            .and_then(|i| usize::try_from(i).ok())
            .ok_or_else(oob)?;
        if end > max {
            Err(oob())
        } else {
            Ok(ptr as usize)
        }
    }

    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn memory_fill(
        &mut self,
        memory_index: MemoryIndex,
        dst: u64,
        val: u8,
        len: u64,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let dst = self.validate_inbounds(memory.current_length(), dst, len)?;

        // Bounds and casts are checked above, by this point we know that
        // everything is safe.
        unsafe {
            let dst = memory.base.add(dst);
            // FIXME audit whether this is safe in the presence of shared memory
            // (https://github.com/bytecodealliance/wasmtime/issues/4203).
            ptr::write_bytes(dst, val, len as usize);
        }

        Ok(())
    }

    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &mut self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let range = match self.module().passive_data_map.get(&data_index).cloned() {
            Some(range) if !self.dropped_data.contains(data_index) => range,
            _ => 0..0,
        };
        self.memory_init_segment(memory_index, range, dst, src, len)
    }

    pub(crate) fn wasm_data(&self, range: Range<u32>) -> &[u8] {
        &self.runtime_info.wasm_data()[range.start as usize..range.end as usize]
    }

    pub(crate) fn memory_init_segment(
        &mut self,
        memory_index: MemoryIndex,
        range: Range<u32>,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_memory(memory_index);
        let data = self.wasm_data(range);
        let dst = self.validate_inbounds(memory.current_length(), dst, len.into())?;
        let src = self.validate_inbounds(data.len(), src.into(), len.into())?;
        let len = len as usize;

        unsafe {
            let src_start = data.as_ptr().add(src);
            let dst_start = memory.base.add(dst);
            // FIXME audit whether this is safe in the presence of shared memory
            // (https://github.com/bytecodealliance/wasmtime/issues/4203).
            ptr::copy_nonoverlapping(src_start, dst_start, len);
        }

        Ok(())
    }

pub unsafe fn load(ptr: *mut Self) -> Self

Return a copy of the VMMemoryDefinition using the relaxed value of current_length; see VMMemoryDefinition::current_length().

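For illustration (the values and the snapshot_demo function below are hypothetical), the copy produced this way holds a plain snapshot of the length, so later growth of the original is not reflected in the copy:

use core::sync::atomic::{AtomicUsize, Ordering};

fn snapshot_demo() {
    let original = VMMemoryDefinition {
        base: core::ptr::null_mut(),
        current_length: AtomicUsize::new(64 * 1024),
    };

    // What `load` does, per the example above: copy `base` and the relaxed
    // length into a fresh, independent definition.
    let copy = VMMemoryDefinition {
        base: original.base,
        current_length: AtomicUsize::new(original.current_length.load(Ordering::Relaxed)),
    };

    // Growing the original afterwards does not update the copy's snapshot.
    original.current_length.store(128 * 1024, Ordering::SeqCst);
    assert_eq!(copy.current_length.load(Ordering::Relaxed), 64 * 1024);
}
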
Examples found in repository
src/instance.rs (line 194)
    pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            self.memory(defined_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { VMMemoryDefinition::load(import.from) }
        }
    }

    /// Get a locally defined or imported memory.
    pub(crate) fn get_runtime_memory(&mut self, index: MemoryIndex) -> &mut Memory {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            unsafe { &mut *self.get_defined_memory(defined_index) }
        } else {
            let import = self.imported_memory(index);
            let ctx = unsafe { &mut *import.vmctx };
            unsafe { &mut *ctx.instance_mut().get_defined_memory(import.index) }
        }
    }

    /// Return the indexed `VMMemoryDefinition`.
    fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
        unsafe { VMMemoryDefinition::load(self.memory_ptr(index)) }
    }

Trait Implementations§

impl Debug for VMMemoryDefinition
Formats the value using the given formatter.

Auto Trait Implementations§

Blanket Implementations§

impl<T> Any for T
Gets the TypeId of self.

impl<T> Borrow<T> for T
Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
Mutably borrows from an owned value.

impl<T> From<T> for T
Returns the argument unchanged.

impl<T, U> Into<U> for T where U: From<T>
Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T, U> TryFrom<U> for T where U: Into<T>
The type returned in the event of a conversion error.
Performs the conversion.

impl<T, U> TryInto<U> for T where U: TryFrom<T>
The type returned in the event of a conversion error.
Performs the conversion.