wasmtime_environ/component/vmcomponent_offsets.rs

// Currently the `VMComponentContext` allocation by field looks like this:
//
// struct VMComponentContext {
//      magic: u32,
//      libcalls: &'static VMComponentLibcalls,
//      store: *mut dyn Store,
//      limits: *const VMRuntimeLimits,
//      flags: [VMGlobalDefinition; component.num_runtime_component_instances],
//      trampoline_func_refs: [VMFuncRef; component.num_trampolines],
//      lowerings: [VMLowering; component.num_lowerings],
//      memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
//      resource_destructors: [*mut VMFuncRef; component.num_resources],
// }
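//
// As a rough orientation (a derived sketch, not an additional invariant): on a
// 64-bit host the `fields!` invocation in `VMComponentOffsets::new` below
// places `magic` at offset 0, aligns to 8 so `libcalls` lands at 8, the
// two-pointer `store` at 16, `limits` at 32, and then aligns to 16 so the
// `flags` array begins at 48. The offsets of the later arrays depend on the
// per-component counts and on `size_of_vmglobal_definition` /
// `size_of_vm_func_ref`, so they are only known once a `Component` is supplied.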

use crate::component::*;
use crate::PtrSize;

/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_enter`
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Flag for the `VMComponentContext::flags` field which is set whenever a
/// function is called to indicate that `post_return` must be called next.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;

/// Runtime offsets within a `VMComponentContext` for a specific component.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // precalculated offsets of various member fields
    magic: u32,
    libcalls: u32,
    store: u32,
    limits: u32,
    flags: u32,
    trampoline_func_refs: u32,
    lowerings: u32,
    memories: u32,
    reallocs: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}

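/// Rounds `offset` up to the nearest multiple of `align`, which must be a
/// power of two. For example (following directly from the bit trick below),
/// `align(5, 8) == 8`, `align(8, 8) == 8`, and `align(0, 16) == 0`.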
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the given `component`, additionally
    /// configured for the given host pointer size `ptr`.
    pub fn new(ptr: P, component: &Component) -> Self {
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories.try_into().unwrap(),
            num_runtime_reallocs: component.num_runtime_reallocs.try_into().unwrap(),
            num_runtime_post_returns: component.num_runtime_post_returns.try_into().unwrap(),
            num_runtime_component_instances: component
                .num_runtime_component_instances
                .try_into()
                .unwrap(),
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_resources: component.num_resources,
            magic: 0,
            libcalls: 0,
            store: 0,
            limits: 0,
            flags: 0,
            trampoline_func_refs: 0,
            lowerings: 0,
            memories: 0,
            reallocs: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience function for checked multiplication. As a side effect
        // this reduces binary size by using only a single `#[track_caller]`
        // panic location inside the helper instead of one for each individual
        // invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

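        // To make the invocation below easier to read: each `size(field) = n`
        // arm records the current running offset into `ret.field` and then
        // bumps the offset by `n` (checked), while each `align(n)` arm rounds
        // the running offset up to an `n`-byte boundary. For example, the
        // first arm `size(magic) = 4u32` expands to roughly:
        //
        //     ret.magic = next_field_offset;
        //     next_field_offset = next_field_offset.checked_add(4).unwrap();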
        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(libcalls) = ret.ptr.size(),
            size(store) = cmul(2, ret.ptr.size()),
            size(limits) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }
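
    // A hedged usage sketch (not part of this module's API surface): code that
    // already has a compiled `component: Component` in hand would typically
    // compute offsets with something like
    //
    //     let offsets = VMComponentOffsets::new(HostPtr, &component);
    //     let flags = offsets.instance_flags(RuntimeComponentInstanceIndex::from_u32(0));
    //
    // where `HostPtr` is assumed to be this crate's host-pointer-size
    // implementation of `PtrSize`.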

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `libcalls` field.
    #[inline]
    pub fn libcalls(&self) -> u32 {
        self.libcalls
    }

    /// The offset of the `VMGlobalDefinition` in the `flags` array for the
    /// component instance `index` specified.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `store` field.
    #[inline]
    pub fn store(&self) -> u32 {
        self.store
    }

    /// The offset of the `limits` field.
    #[inline]
    pub fn limits(&self) -> u32 {
        self.limits
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of the `VMFuncRef` for the trampoline `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }
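
    // Putting the lowering accessors above together (a derived note, not an
    // extra invariant): on a 64-bit host the `VMLowering` for `LoweredIndex`
    // `i` starts at `lowerings() + i * 16`, with its `callee` pointer at
    // offset +0 and its `data` pointer at offset +8 within that entry.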

    /// The offset of the base of the `runtime_memories` field
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_reallocs` field
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` destructor for the resource `index`
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}