wit_bindgen_core/
abi.rs

use std::fmt;
use std::iter;

pub use wit_parser::abi::{AbiVariant, FlatTypes, WasmSignature, WasmType};
use wit_parser::{
    Alignment, ArchitectureSize, ElementInfo, Enum, Flags, FlagsRepr, Function, Handle, Int,
    Record, Resolve, Result_, SizeAlign, Tuple, Type, TypeDefKind, TypeId, Variant, align_to_arch,
};

// Helper macro for defining instructions without having to have tons of
// exhaustive `match` statements to update
macro_rules! def_instruction {
    (
        $( #[$enum_attr:meta] )*
        pub enum $name:ident<'a> {
            $(
                $( #[$attr:meta] )*
                $variant:ident $( {
                    $($field:ident : $field_ty:ty $(,)* )*
                } )?
                    :
                [$num_popped:expr] => [$num_pushed:expr],
            )*
        }
    ) => {
        $( #[$enum_attr] )*
        pub enum $name<'a> {
            $(
                $( #[$attr] )*
                $variant $( {
                    $(
                        $field : $field_ty,
                    )*
                } )? ,
            )*
        }

        impl $name<'_> {
            /// How many operands does this instruction pop from the stack?
            #[allow(unused_variables)]
            pub fn operands_len(&self) -> usize {
                match self {
                    $(
                        Self::$variant $( {
                            $(
                                $field,
                            )*
                        } )? => $num_popped,
                    )*
                }
            }

            /// How many results does this instruction push onto the stack?
            #[allow(unused_variables)]
            pub fn results_len(&self) -> usize {
                match self {
                    $(
                        Self::$variant $( {
                            $(
                                $field,
                            )*
                        } )? => $num_pushed,
                    )*
                }
            }
        }
    };
}

def_instruction! {
    #[derive(Debug)]
    pub enum Instruction<'a> {
        /// Acquires the specified parameter and places it on the stack.
        /// Depending on the context this may refer to wasm parameters or
        /// interface types parameters.
        GetArg { nth: usize } : [0] => [1],

        // Integer const/manipulation instructions

        /// Pushes the constant `val` onto the stack.
        I32Const { val: i32 } : [0] => [1],
        /// Casts the top N items on the stack using the `Bitcast` enum
        /// provided. Consumes the same number of operands that this produces.
        Bitcasts { casts: &'a [Bitcast] } : [casts.len()] => [casts.len()],
        /// Pushes a number of constant zeros for each wasm type on the stack.
        ConstZero { tys: &'a [WasmType] } : [0] => [tys.len()],

        // Memory load/store instructions

        /// Pops a pointer from the stack and loads a little-endian `i32` from
        /// it, using the specified constant offset.
        I32Load { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `i8` from
        /// it, using the specified constant offset. The value loaded is
        /// zero-extended to 32-bits.
        I32Load8U { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `i8` from
        /// it, using the specified constant offset. The value loaded is
        /// sign-extended to 32-bits.
        I32Load8S { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `i16` from
        /// it, using the specified constant offset. The value loaded is
        /// zero-extended to 32-bits.
        I32Load16U { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `i16` from
        /// it, using the specified constant offset. The value loaded is
        /// sign-extended to 32-bits.
        I32Load16S { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `i64` from
        /// it, using the specified constant offset.
        I64Load { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `f32` from
        /// it, using the specified constant offset.
        F32Load { offset: ArchitectureSize } : [1] => [1],
        /// Pops a pointer from the stack and loads a little-endian `f64` from
        /// it, using the specified constant offset.
        F64Load { offset: ArchitectureSize } : [1] => [1],

        /// Like `I32Load` or `I64Load`, but for loading pointer values.
        PointerLoad { offset: ArchitectureSize } : [1] => [1],
        /// Like `I32Load` or `I64Load`, but for loading array length values.
        LengthLoad { offset: ArchitectureSize } : [1] => [1],

        /// Pops a pointer from the stack and then an `i32` value.
        /// Stores the value in little-endian at the pointer specified plus the
        /// constant `offset`.
        I32Store { offset: ArchitectureSize } : [2] => [0],
        /// Pops a pointer from the stack and then an `i32` value.
        /// Stores the low 8 bits of the value in little-endian at the pointer
        /// specified plus the constant `offset`.
        I32Store8 { offset: ArchitectureSize } : [2] => [0],
        /// Pops a pointer from the stack and then an `i32` value.
        /// Stores the low 16 bits of the value in little-endian at the pointer
        /// specified plus the constant `offset`.
        I32Store16 { offset: ArchitectureSize } : [2] => [0],
        /// Pops a pointer from the stack and then an `i64` value.
        /// Stores the value in little-endian at the pointer specified plus the
        /// constant `offset`.
        I64Store { offset: ArchitectureSize } : [2] => [0],
        /// Pops a pointer from the stack and then an `f32` value.
        /// Stores the value in little-endian at the pointer specified plus the
        /// constant `offset`.
        F32Store { offset: ArchitectureSize } : [2] => [0],
        /// Pops a pointer from the stack and then an `f64` value.
        /// Stores the value in little-endian at the pointer specified plus the
        /// constant `offset`.
        F64Store { offset: ArchitectureSize } : [2] => [0],

        /// Like `I32Store` or `I64Store`, but for storing pointer values.
        PointerStore { offset: ArchitectureSize } : [2] => [0],
        /// Like `I32Store` or `I64Store`, but for storing array length values.
        LengthStore { offset: ArchitectureSize } : [2] => [0],

        // Scalar lifting/lowering

        /// Converts an interface type `char` value to a 32-bit integer
        /// representing the unicode scalar value.
        I32FromChar : [1] => [1],
        /// Converts an interface type `u64` value to a wasm `i64`.
        I64FromU64 : [1] => [1],
        /// Converts an interface type `s64` value to a wasm `i64`.
        I64FromS64 : [1] => [1],
        /// Converts an interface type `u32` value to a wasm `i32`.
        I32FromU32 : [1] => [1],
        /// Converts an interface type `s32` value to a wasm `i32`.
        I32FromS32 : [1] => [1],
        /// Converts an interface type `u16` value to a wasm `i32`.
        I32FromU16 : [1] => [1],
        /// Converts an interface type `s16` value to a wasm `i32`.
        I32FromS16 : [1] => [1],
        /// Converts an interface type `u8` value to a wasm `i32`.
        I32FromU8 : [1] => [1],
        /// Converts an interface type `s8` value to a wasm `i32`.
        I32FromS8 : [1] => [1],
        /// Converts an interface type `f32` value to a wasm `f32`.
        ///
        /// This may be a noop for some implementations, but it's here in case the
        /// native language representation of `f32` is different than the wasm
        /// representation of `f32`.
        CoreF32FromF32 : [1] => [1],
        /// Converts an interface type `f64` value to a wasm `f64`.
        ///
        /// This may be a noop for some implementations, but it's here in case the
        /// native language representation of `f64` is different than the wasm
        /// representation of `f64`.
        CoreF64FromF64 : [1] => [1],

        /// Converts a native wasm `i32` to an interface type `s8`.
        ///
        /// This will truncate the upper bits of the `i32`.
        S8FromI32 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `u8`.
        ///
        /// This will truncate the upper bits of the `i32`.
        U8FromI32 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `s16`.
        ///
        /// This will truncate the upper bits of the `i32`.
        S16FromI32 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `u16`.
        ///
        /// This will truncate the upper bits of the `i32`.
        U16FromI32 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `s32`.
        S32FromI32 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `u32`.
        U32FromI32 : [1] => [1],
        /// Converts a native wasm `i64` to an interface type `s64`.
        S64FromI64 : [1] => [1],
        /// Converts a native wasm `i64` to an interface type `u64`.
        U64FromI64 : [1] => [1],
        /// Converts a native wasm `i32` to an interface type `char`.
        ///
        /// It's safe to assume that the `i32` is indeed a valid unicode code point.
        CharFromI32 : [1] => [1],
        /// Converts a native wasm `f32` to an interface type `f32`.
        F32FromCoreF32 : [1] => [1],
        /// Converts a native wasm `f64` to an interface type `f64`.
        F64FromCoreF64 : [1] => [1],

        /// Creates a `bool` from an `i32` input, trapping if the `i32` isn't
        /// zero or one.
        BoolFromI32 : [1] => [1],
        /// Creates an `i32` from a `bool` input, must return 0 or 1.
        I32FromBool : [1] => [1],

        // lists

        /// Lowers a list where the element's layout in the native language is
        /// expected to match the canonical ABI definition of interface types.
        ///
        /// Pops a list value from the stack and pushes the pointer/length onto
        /// the stack. If `realloc` is set to `Some` then this is expected to
        /// *consume* the list which means that the data needs to be copied. An
        /// allocation/copy is expected when:
        ///
        /// * A host is calling a wasm export with a list (it needs to copy the
        ///   list into the callee's module, allocating space with `realloc`)
        /// * A wasm export is returning a list (it's expected to use `realloc`
        ///   to give ownership of the list to the caller)
        /// * A host is returning a list in an import definition, meaning that
        ///   space needs to be allocated in the caller with `realloc`
        ///
        /// A copy does not happen (e.g. `realloc` is `None`) when:
        ///
        /// * A wasm module calls an import with the list. In this situation
        ///   it's expected the caller will know how to access this module's
        ///   memory (e.g. the host has raw access or wasm-to-wasm communication
        ///   would copy the list).
        ///
        /// If `realloc` is `Some` then the adapter is not responsible for
        /// cleaning up this list because the other end is receiving the
        /// allocation. If `realloc` is `None` then the adapter is responsible
        /// for cleaning up any temporary allocation it created, if any.
        ListCanonLower {
            element: &'a Type,
            realloc: Option<&'a str>,
        } : [1] => [2],

        /// Same as `ListCanonLower`, but used for strings
        StringLower {
            realloc: Option<&'a str>,
        } : [1] => [2],

        /// Lowers a list where the element's layout in the native language is
        /// not expected to match the canonical ABI definition of interface
        /// types.
        ///
        /// Pops a list value from the stack and pushes the pointer/length onto
        /// the stack. This operation also pops a block from the block stack
        /// which is used as the iteration body of writing each element of the
        /// list consumed.
        ///
        /// The `realloc` field here behaves the same way as `ListCanonLower`.
        /// It's only set to `None` when a wasm module calls a declared import.
        /// Otherwise lowering in other contexts requires allocating memory for
        /// the receiver to own.
        ListLower {
            element: &'a Type,
            realloc: Option<&'a str>,
        } : [1] => [2],

        /// Lifts a list which has a canonical representation into an interface
        /// types value.
        ///
        /// The term "canonical" representation here means that the
        /// representation of the interface types value in the native language
        /// exactly matches the canonical ABI definition of the type.
        ///
        /// This will consume two `i32` values from the stack, a pointer and a
        /// length, and then produces an interface value list.
        ListCanonLift {
            element: &'a Type,
            ty: TypeId,
        } : [2] => [1],

        /// Same as `ListCanonLift`, but used for strings
        StringLift : [2] => [1],

        /// Lifts a list into an interface types value.
        ///
        /// This will consume two `i32` values from the stack, a pointer and a
        /// length, and then produces an interface value list.
        ///
        /// This will also pop a block from the block stack which is how to
        /// read each individual element from the list.
        ListLift {
            element: &'a Type,
            ty: TypeId,
        } : [2] => [1],

        /// Pushes an operand onto the stack representing the list item from
        /// each iteration of the list.
        ///
        /// This is only used inside of blocks related to lowering lists.
        IterElem { element: &'a Type } : [0] => [1],

        /// Pushes an operand onto the stack representing the base pointer of
        /// the next element in a list.
        ///
        /// This is used for both lifting and lowering lists.
        IterBasePointer : [0] => [1],

        // records and tuples

        /// Pops a record value off the stack, decomposes the record to all of
        /// its fields, and then pushes the fields onto the stack.
        RecordLower {
            record: &'a Record,
            name: &'a str,
            ty: TypeId,
        } : [1] => [record.fields.len()],

        /// Pops all fields for a record off the stack and then composes them
        /// into a record.
        RecordLift {
            record: &'a Record,
            name: &'a str,
            ty: TypeId,
        } : [record.fields.len()] => [1],

        /// Create an `i32` from a handle.
        HandleLower {
            handle: &'a Handle,
            name: &'a str,
            ty: TypeId,
        } : [1] => [1],

        /// Create a handle from an `i32`.
        HandleLift {
            handle: &'a Handle,
            name: &'a str,
            ty: TypeId,
        } : [1] => [1],

        /// Create an `i32` from a future.
        FutureLower {
            payload: &'a Option<Type>,
            ty: TypeId,
        } : [1] => [1],

        /// Create a future from an `i32`.
        FutureLift {
            payload: &'a Option<Type>,
            ty: TypeId,
        } : [1] => [1],

        /// Create an `i32` from a stream.
        StreamLower {
            payload: &'a Option<Type>,
            ty: TypeId,
        } : [1] => [1],

        /// Create a stream from an `i32`.
        StreamLift {
            payload: &'a Option<Type>,
            ty: TypeId,
        } : [1] => [1],

        /// Create an `i32` from an error-context.
        ErrorContextLower : [1] => [1],

        /// Create an error-context from an `i32`.
        ErrorContextLift : [1] => [1],

        /// Pops a tuple value off the stack, decomposes the tuple to all of
        /// its fields, and then pushes the fields onto the stack.
        TupleLower {
            tuple: &'a Tuple,
            ty: TypeId,
        } : [1] => [tuple.types.len()],

        /// Pops all fields for a tuple off the stack and then composes them
        /// into a tuple.
        TupleLift {
            tuple: &'a Tuple,
            ty: TypeId,
        } : [tuple.types.len()] => [1],

        /// Converts a language-specific record-of-bools to a list of `i32`.
        FlagsLower {
            flags: &'a Flags,
            name: &'a str,
            ty: TypeId,
        } : [1] => [flags.repr().count()],
        /// Converts a list of native wasm `i32` to a language-specific
        /// record-of-bools.
        FlagsLift {
            flags: &'a Flags,
            name: &'a str,
            ty: TypeId,
        } : [flags.repr().count()] => [1],

        // variants

        /// This is a special instruction used by the `VariantLower`
        /// instruction to determine the name of the payload, if present, to use
        /// within each block.
        ///
        /// Each sub-block will have this be the first instruction, and if it
        /// lowers a payload it will expect something bound to this name.
        VariantPayloadName : [0] => [1],

        /// Pops a variant off the stack as well as `ty.cases.len()` blocks
        /// from the code generator. Uses each of those blocks and the value
        /// from the stack to produce `results.len()` items.
        VariantLower {
            variant: &'a Variant,
            name: &'a str,
            ty: TypeId,
            results: &'a [WasmType],
        } : [1] => [results.len()],

        /// Pops an `i32` off the stack as well as `ty.cases.len()` blocks
        /// from the code generator. Uses each of those blocks and the value
        /// from the stack to produce a final variant.
        VariantLift {
            variant: &'a Variant,
            name: &'a str,
            ty: TypeId,
        } : [1] => [1],

        /// Pops an enum off the stack and pushes the `i32` representation.
        EnumLower {
            enum_: &'a Enum,
            name: &'a str,
            ty: TypeId,
        } : [1] => [1],

        /// Pops an `i32` off the stack and lifts it into the `enum` specified.
        EnumLift {
            enum_: &'a Enum,
            name: &'a str,
            ty: TypeId,
        } : [1] => [1],

        /// Specialization of `VariantLower` for specifically `option<T>` types,
        /// otherwise behaves the same as `VariantLower` (e.g. two blocks for
        /// the two cases).
        OptionLower {
            payload: &'a Type,
            ty: TypeId,
            results: &'a [WasmType],
        } : [1] => [results.len()],

        /// Specialization of `VariantLift` for specifically the `option<T>`
        /// type. Otherwise behaves the same as the `VariantLift` instruction
        /// with two blocks for the lift.
        OptionLift {
            payload: &'a Type,
            ty: TypeId,
        } : [1] => [1],

        /// Specialization of `VariantLower` for specifically `result<T, E>`
        /// types, otherwise behaves the same as `VariantLower` (e.g. two blocks
        /// for the two cases).
        ResultLower {
            result: &'a Result_,
            ty: TypeId,
            results: &'a [WasmType],
        } : [1] => [results.len()],

        /// Specialization of `VariantLift` for specifically the `result<T,
        /// E>` type. Otherwise behaves the same as the `VariantLift`
        /// instruction with two blocks for the lift.
        ResultLift {
            result: &'a Result_,
            ty: TypeId,
        } : [1] => [1],

        // calling/control flow

        /// Represents a call to a raw WebAssembly API. The module/name are
        /// provided inline as well as the types if necessary.
        CallWasm {
            name: &'a str,
            sig: &'a WasmSignature,
        } : [sig.params.len()] => [sig.results.len()],

        /// Same as `CallWasm`, except the dual where an interface is being
        /// called rather than a raw wasm function.
        ///
        /// Note that this will be used for async functions, and `async_`
        /// indicates whether the function should be invoked in an async
        /// fashion.
        CallInterface {
            func: &'a Function,
            async_: bool,
        } : [func.params.len()] => [usize::from(func.result.is_some())],

        /// Returns `amt` values on the stack. This is always the last
        /// instruction.
        Return { amt: usize, func: &'a Function } : [*amt] => [0],

        /// Calls the `realloc` function specified in a malloc-like fashion
        /// allocating `size` bytes with alignment `align`.
        ///
        /// Pushes the returned pointer onto the stack.
        Malloc {
            realloc: &'static str,
            size: ArchitectureSize,
            align: Alignment,
        } : [0] => [1],

        /// Used exclusively for guest-code generation, this indicates that
        /// the standard memory deallocation function needs to be invoked with
        /// the specified parameters.
        ///
        /// This will pop a pointer from the stack and push nothing.
        GuestDeallocate {
            size: ArchitectureSize,
            align: Alignment,
        } : [1] => [0],

        /// Used exclusively for guest-code generation, this indicates that
        /// a string is being deallocated. The ptr/length are on the stack and
        /// are popped off and used to deallocate the string.
        GuestDeallocateString : [2] => [0],

        /// Used exclusively for guest-code generation, this indicates that
        /// a list is being deallocated. The ptr/length are on the stack and
        /// are popped off and used to deallocate the list.
        ///
        /// This variant also pops a block off the block stack to be used as the
        /// body of the deallocation loop.
        GuestDeallocateList {
            element: &'a Type,
        } : [2] => [0],

        /// Used exclusively for guest-code generation, this indicates that
        /// a variant is being deallocated. The integer discriminant is popped
        /// off the stack as well as `blocks` number of blocks popped from the
        /// blocks stack. The variant is used to select, at runtime, which of
        /// the blocks is executed to deallocate the variant.
        GuestDeallocateVariant {
            blocks: usize,
        } : [1] => [0],

        /// Deallocates the language-specific handle representation on the top
        /// of the stack. Used for async imports.
        DropHandle { ty: &'a Type } : [1] => [0],

        /// Call `task.return` for an async-lifted export.
        ///
        /// This will call core wasm import `name` which will be mapped to
        /// `task.return` later on. The function given has `params` as its
        /// parameters and it will return no results. This is used to pass the
        /// lowered representation of a function's results to `task.return`.
        AsyncTaskReturn { name: &'a str, params: &'a [WasmType] } : [params.len()] => [0],

        /// Force the evaluation of the specified number of expressions and push
        /// the results to the stack.
        ///
        /// This is useful prior to disposing of temporary variables and/or
        /// allocations which are referenced by one or more not-yet-evaluated
        /// expressions.
        Flush { amt: usize } : [*amt] => [*amt],
    }
}
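// A minimal illustrative sketch (added here as an example; not part of the
// upstream crate) checking that the `[popped] => [pushed]` annotations above
// drive the macro-generated `operands_len`/`results_len` methods as expected.
#[cfg(test)]
mod instruction_arity_example {
    use super::*;

    #[test]
    fn stack_arity_matches_annotations() {
        // `I32Load { .. } : [1] => [1]` pops a pointer and pushes the loaded value.
        let load = Instruction::I32Load {
            offset: ArchitectureSize::default(),
        };
        assert_eq!(load.operands_len(), 1);
        assert_eq!(load.results_len(), 1);

        // `I32Store { .. } : [2] => [0]` pops a value plus a pointer and pushes nothing.
        let store = Instruction::I32Store {
            offset: ArchitectureSize::default(),
        };
        assert_eq!(store.operands_len(), 2);
        assert_eq!(store.results_len(), 0);
    }
}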

#[derive(Debug, PartialEq)]
pub enum Bitcast {
    // Upcasts
    F32ToI32,
    F64ToI64,
    I32ToI64,
    F32ToI64,

    // Downcasts
    I32ToF32,
    I64ToF64,
    I64ToI32,
    I64ToF32,

    // PointerOrI64 conversions. These preserve provenance when the source
    // or destination is a pointer value.
    //
    // These are used when pointer values are being stored in
    // (ToP64) and loaded out of (P64To) PointerOrI64 values, so they
    // always have to preserve provenance when the value being loaded or
    // stored is a pointer.
    P64ToI64,
    I64ToP64,
    P64ToP,
    PToP64,

    // Pointer<->number conversions. These do not preserve provenance.
    //
    // These are used when integer or floating-point values are being stored in
    // (I32ToP/etc.) and loaded out of (PToI32/etc.) pointer values, so they
    // never have any provenance to preserve.
    I32ToP,
    PToI32,
    PToL,
    LToP,

    // Number<->Number conversions.
    I32ToL,
    LToI32,
    I64ToL,
    LToI64,

    // Multiple conversions in sequence.
    Sequence(Box<[Bitcast; 2]>),

    None,
}

/// Whether the glue code surrounding a call is lifting arguments and lowering
/// results or vice versa.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum LiftLower {
    /// When the glue code lifts arguments and lowers results.
    ///
    /// ```text
    /// Wasm --lift-args--> SourceLanguage; call; SourceLanguage --lower-results--> Wasm
    /// ```
    LiftArgsLowerResults,
    /// When the glue code lowers arguments and lifts results.
    ///
    /// ```text
    /// SourceLanguage --lower-args--> Wasm; call; Wasm --lift-results--> SourceLanguage
    /// ```
    LowerArgsLiftResults,
}

/// Trait for language implementors to use to generate glue code between native
/// WebAssembly signatures and interface types signatures.
///
/// This is used as an implementation detail in interpreting the ABI between
/// interface types and wasm types. Eventually this will be driven by interface
/// types adapters themselves, but for now the ABI of a function dictates what
/// instructions are fed in.
///
/// Types implementing `Bindgen` are incrementally fed `Instruction` values to
/// generate code for. Instructions operate like a stack machine where each
/// instruction has a list of inputs and a list of outputs (provided by the
/// `emit` function).
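///
/// # Example
///
/// A minimal sketch of an implementation (the `PseudoGen` type below is
/// hypothetical and not part of this crate) that renders each instruction as a
/// line of pseudo-code, naming a fresh temporary for every pushed result:
///
/// ```ignore
/// struct PseudoGen {
///     src: String,
///     sizes: SizeAlign,
///     tmp: usize,
/// }
///
/// impl Bindgen for PseudoGen {
///     type Operand = String;
///
///     fn emit(
///         &mut self,
///         _resolve: &Resolve,
///         inst: &Instruction<'_>,
///         operands: &mut Vec<String>,
///         results: &mut Vec<String>,
///     ) {
///         // Push one named temporary for each result the instruction produces.
///         for _ in 0..inst.results_len() {
///             results.push(format!("t{}", self.tmp));
///             self.tmp += 1;
///         }
///         self.src.push_str(&format!("{results:?} = {inst:?}({operands:?})\n"));
///     }
///
///     fn return_pointer(&mut self, _size: ArchitectureSize, _align: Alignment) -> String {
///         "ret_area".to_string()
///     }
///
///     fn push_block(&mut self) {}
///     fn finish_block(&mut self, _operand: &mut Vec<String>) {}
///     fn sizes(&self) -> &SizeAlign {
///         &self.sizes
///     }
///     fn is_list_canonical(&self, _resolve: &Resolve, _element: &Type) -> bool {
///         false
///     }
/// }
/// ```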
pub trait Bindgen {
    /// The intermediate type for fragments of code for this type.
    ///
    /// For most languages `String` is a suitable intermediate type.
    type Operand: Clone + fmt::Debug;

    /// Emit code to implement the given instruction.
    ///
    /// Each operand is given in `operands` and can be popped off if ownership
    /// is required. It's guaranteed that `operands` has the appropriate length
    /// for the `inst` given, as specified with [`Instruction`].
    ///
    /// Each result variable should be pushed onto `results`. This function must
    /// push the appropriate number of results or binding generation will panic.
    fn emit(
        &mut self,
        resolve: &Resolve,
        inst: &Instruction<'_>,
        operands: &mut Vec<Self::Operand>,
        results: &mut Vec<Self::Operand>,
    );

    /// Gets an operand reference to the return pointer area.
    ///
    /// The provided size and alignment are for the function's return type.
    fn return_pointer(&mut self, size: ArchitectureSize, align: Alignment) -> Self::Operand;

    /// Enters a new block of code to generate code for.
    ///
    /// This is currently exclusively used for constructing variants. When a
    /// variant is constructed a block here will be pushed for each case of a
    /// variant, generating the code necessary to translate a variant case.
    ///
    /// Blocks are completed with `finish_block` below. It's expected that `emit`
    /// will always push code (if necessary) into the "current block", which is
    /// updated by calling this method and `finish_block` below.
    fn push_block(&mut self);

    /// Indicates to the code generator that a block is completed, and the
    /// `operand` specified was the resulting value of the block.
    ///
    /// This method will be used to compute the value of each arm of lifting a
    /// variant. The `operand` will be `None` if the variant case didn't
    /// actually have any type associated with it. Otherwise it will be `Some`
    /// as the last value remaining on the stack representing the value
    /// associated with a variant's `case`.
    ///
    /// It's expected that this will resume code generation in the previous
    /// block before `push_block` was called. This must also save the results
    /// of the current block internally for instructions like `ResultLift` to
    /// use later.
    fn finish_block(&mut self, operand: &mut Vec<Self::Operand>);

    /// Returns size information that was previously calculated for all types.
    fn sizes(&self) -> &SizeAlign;

    /// Returns whether or not the specified element type is represented in a
    /// "canonical" form for lists. This dictates whether the `ListCanonLower`
    /// and `ListCanonLift` instructions are used or not.
    fn is_list_canonical(&self, resolve: &Resolve, element: &Type) -> bool;
}

/// Generates an abstract sequence of instructions which represents this
/// function being adapted as an imported function.
///
/// The instructions here, when executed, will emulate a language with
/// interface types calling the concrete wasm implementation. The parameters
/// for the returned instruction sequence are the language's own
/// interface-types parameters. One instruction in the instruction stream
/// will be a `CallWasm` which represents calling the actual raw wasm function
/// signature.
///
/// This function is useful, for example, if you're building a language
/// generator for WASI bindings. This will document how to translate
/// language-specific values into the wasm types to call a WASI function,
/// and it will also automatically convert the results of the WASI function
/// back to a language-specific value.
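///
/// # Example
///
/// A sketch of how a generator might drive this function (the `gen` value is a
/// hypothetical `Bindgen` implementation, not something provided by this
/// crate):
///
/// ```ignore
/// // Generate the import-side glue for `func`: lower arguments, call the raw
/// // wasm function, then lift the results.
/// call(
///     resolve,
///     AbiVariant::GuestImport,
///     LiftLower::LowerArgsLiftResults,
///     func,
///     &mut gen,
///     /* async_ */ false,
/// );
/// ```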
pub fn call(
    resolve: &Resolve,
    variant: AbiVariant,
    lift_lower: LiftLower,
    func: &Function,
    bindgen: &mut impl Bindgen,
    async_: bool,
) {
    Generator::new(resolve, bindgen).call(func, variant, lift_lower, async_);
}

pub fn lower_to_memory<B: Bindgen>(
    resolve: &Resolve,
    bindgen: &mut B,
    address: B::Operand,
    value: B::Operand,
    ty: &Type,
) {
    let mut generator = Generator::new(resolve, bindgen);
    // TODO: make this configurable? Right now this function is only called for
    // future/stream callbacks so it's appropriate to skip realloc here as it's
    // all "lower for wasm import", but this might get reused for something else
    // in the future.
    generator.realloc = Some(Realloc::Export("cabi_realloc"));
    generator.stack.push(value);
    generator.write_to_memory(ty, address, Default::default());
}

pub fn lower_flat<B: Bindgen>(
    resolve: &Resolve,
    bindgen: &mut B,
    value: B::Operand,
    ty: &Type,
) -> Vec<B::Operand> {
    let mut generator = Generator::new(resolve, bindgen);
    generator.stack.push(value);
    generator.realloc = Some(Realloc::Export("cabi_realloc"));
    generator.lower(ty);
    generator.stack
}

pub fn lift_from_memory<B: Bindgen>(
    resolve: &Resolve,
    bindgen: &mut B,
    address: B::Operand,
    ty: &Type,
) -> B::Operand {
    let mut generator = Generator::new(resolve, bindgen);
    generator.read_from_memory(ty, address, Default::default());
    generator.stack.pop().unwrap()
}

/// Used in a similar manner as the `call` function, except it is used to
/// generate the `post-return` callback for `func`.
///
/// This is only intended to be used in guest generators for exported
/// functions and will primarily generate `GuestDeallocate*` instructions,
/// plus others used as input to those instructions.
pub fn post_return(resolve: &Resolve, func: &Function, bindgen: &mut impl Bindgen) {
    Generator::new(resolve, bindgen).post_return(func);
}

/// Returns whether the `Function` specified needs a post-return function to
/// be generated in guest code.
///
/// This is used primarily when the return value contains a memory allocation
/// such as a list or a string.
pub fn guest_export_needs_post_return(resolve: &Resolve, func: &Function) -> bool {
    func.result
        .map(|t| needs_deallocate(resolve, &t, Deallocate::Lists))
        .unwrap_or(false)
}

pub fn guest_export_params_have_allocations(resolve: &Resolve, func: &Function) -> bool {
    func.params
        .iter()
        .any(|(_, t)| needs_deallocate(resolve, &t, Deallocate::Lists))
}

fn needs_deallocate(resolve: &Resolve, ty: &Type, what: Deallocate) -> bool {
    match ty {
        Type::String => true,
        Type::ErrorContext => true,
        Type::Id(id) => match &resolve.types[*id].kind {
            TypeDefKind::List(_) => true,
            TypeDefKind::Type(t) => needs_deallocate(resolve, t, what),
            TypeDefKind::Handle(Handle::Own(_)) => what.handles(),
            TypeDefKind::Handle(Handle::Borrow(_)) => false,
            TypeDefKind::Resource => false,
            TypeDefKind::Record(r) => r
                .fields
                .iter()
                .any(|f| needs_deallocate(resolve, &f.ty, what)),
            TypeDefKind::Tuple(t) => t.types.iter().any(|t| needs_deallocate(resolve, t, what)),
            TypeDefKind::Variant(t) => t
                .cases
                .iter()
                .filter_map(|t| t.ty.as_ref())
                .any(|t| needs_deallocate(resolve, t, what)),
            TypeDefKind::Option(t) => needs_deallocate(resolve, t, what),
            TypeDefKind::Result(t) => [&t.ok, &t.err]
                .iter()
                .filter_map(|t| t.as_ref())
                .any(|t| needs_deallocate(resolve, t, what)),
            TypeDefKind::Flags(_) | TypeDefKind::Enum(_) => false,
            TypeDefKind::Future(_) | TypeDefKind::Stream(_) => what.handles(),
            TypeDefKind::Unknown => unreachable!(),
            TypeDefKind::FixedSizeList(..) => todo!(),
        },

        Type::Bool
        | Type::U8
        | Type::S8
        | Type::U16
        | Type::S16
        | Type::U32
        | Type::S32
        | Type::U64
        | Type::S64
        | Type::F32
        | Type::F64
        | Type::Char => false,
    }
}

/// Generate instructions in `bindgen` to deallocate all lists in `operands`,
/// where that's a pointer to a sequence of `types` stored in linear memory.
pub fn deallocate_lists_in_types<B: Bindgen>(
    resolve: &Resolve,
    types: &[Type],
    operands: &[B::Operand],
    indirect: bool,
    bindgen: &mut B,
) {
    Generator::new(resolve, bindgen).deallocate_in_types(
        types,
        operands,
        indirect,
        Deallocate::Lists,
    );
}

/// Generate instructions in `bindgen` to deallocate all lists and owned
/// resources in `operands`, where that's a pointer to a sequence of `types`
/// stored in linear memory.
pub fn deallocate_lists_and_own_in_types<B: Bindgen>(
    resolve: &Resolve,
    types: &[Type],
    operands: &[B::Operand],
    indirect: bool,
    bindgen: &mut B,
) {
    Generator::new(resolve, bindgen).deallocate_in_types(
        types,
        operands,
        indirect,
        Deallocate::ListsAndOwn,
    );
}

#[derive(Copy, Clone)]
pub enum Realloc {
    None,
    Export(&'static str),
}

/// What to deallocate in various `deallocate_*` methods.
#[derive(Copy, Clone)]
enum Deallocate {
    /// Only deallocate lists.
    Lists,
    /// Deallocate lists and owned resources such as `own<T>` and
    /// futures/streams.
    ListsAndOwn,
}

impl Deallocate {
    fn handles(&self) -> bool {
        match self {
            Deallocate::Lists => false,
            Deallocate::ListsAndOwn => true,
        }
    }
}

struct Generator<'a, B: Bindgen> {
    bindgen: &'a mut B,
    resolve: &'a Resolve,
    operands: Vec<B::Operand>,
    results: Vec<B::Operand>,
    stack: Vec<B::Operand>,
    return_pointer: Option<B::Operand>,
    realloc: Option<Realloc>,
}

const MAX_FLAT_PARAMS: usize = 16;
const MAX_FLAT_ASYNC_PARAMS: usize = 4;

impl<'a, B: Bindgen> Generator<'a, B> {
    fn new(resolve: &'a Resolve, bindgen: &'a mut B) -> Generator<'a, B> {
        Generator {
            resolve,
            bindgen,
            operands: Vec::new(),
            results: Vec::new(),
            stack: Vec::new(),
            return_pointer: None,
            realloc: None,
        }
    }

    fn call(&mut self, func: &Function, variant: AbiVariant, lift_lower: LiftLower, async_: bool) {
        let sig = self.resolve.wasm_signature(variant, func);

        // Lowering parameters calling a wasm import _or_ returning a result
        // from an async-lifted wasm export means we don't need to pass
        // ownership, but we pass ownership in all other cases.
        let realloc = match (variant, lift_lower, async_) {
            (AbiVariant::GuestImport, LiftLower::LowerArgsLiftResults, _)
            | (
                AbiVariant::GuestExport
                | AbiVariant::GuestExportAsync
                | AbiVariant::GuestExportAsyncStackful,
                LiftLower::LiftArgsLowerResults,
                true,
            ) => Realloc::None,
            _ => Realloc::Export("cabi_realloc"),
        };
        assert!(self.realloc.is_none());

        match lift_lower {
            LiftLower::LowerArgsLiftResults => {
                self.realloc = Some(realloc);

                // Create a closure that lowers each parameter into memory
                let lower_to_memory = |self_: &mut Self, ptr: B::Operand| {
                    let mut offset = ArchitectureSize::default();
                    for (nth, (_, ty)) in func.params.iter().enumerate() {
                        self_.emit(&Instruction::GetArg { nth });
                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
                        self_.write_to_memory(ty, ptr.clone(), offset);
                        offset += self_.bindgen.sizes().size(ty);
                    }

                    self_.stack.push(ptr);
                };

                // Lower parameters
                if sig.indirect_params {
                    // If parameters are indirect, space is allocated for them
                    // and each argument is lowered individually into memory.
                    let ElementInfo { size, align } = self
                        .bindgen
                        .sizes()
                        .record(func.params.iter().map(|t| &t.1));

                    // Resolve the pointer to the indirectly stored parameters
                    let ptr = match variant {
                        // When a wasm module calls an import it will provide
                        // space that isn't explicitly deallocated.
                        AbiVariant::GuestImport => self.bindgen.return_pointer(size, align),

                        AbiVariant::GuestImportAsync => {
                            todo!("direct param lowering for async guest import not implemented")
                        }

                        // When calling a wasm module from the outside, though,
                        // malloc needs to be called.
                        AbiVariant::GuestExport => {
                            self.emit(&Instruction::Malloc {
                                realloc: "cabi_realloc",
                                size,
                                align,
                            });
                            self.stack.pop().unwrap()
                        }

                        AbiVariant::GuestExportAsync | AbiVariant::GuestExportAsyncStackful => {
                            todo!("direct param lowering for async not implemented")
                        }
                    };

                    // Lower the parameters to memory
                    lower_to_memory(self, ptr);
                } else {
                    // ... otherwise arguments are direct (there aren't too
                    // many), so we simply do a normal lower operation for each
                    // of them.
                    for (nth, (_, ty)) in func.params.iter().enumerate() {
                        self.emit(&Instruction::GetArg { nth });
                        self.lower(ty);
                    }
                }
                self.realloc = None;

                // If necessary, prepare a return pointer for this ABI.
                if variant == AbiVariant::GuestImport && sig.retptr {
                    let info = self.bindgen.sizes().params(&func.result);
                    let ptr = self.bindgen.return_pointer(info.size, info.align);
                    self.return_pointer = Some(ptr.clone());
                    self.stack.push(ptr);
                }

                // Call the Wasm function
                assert_eq!(self.stack.len(), sig.params.len());
                self.emit(&Instruction::CallWasm {
                    name: &func.name,
                    sig: &sig,
                });

                // Handle the result
                if sig.retptr {
                    // If there is a return pointer then the results were
                    // stored in memory, so locate that memory and read the
                    // results back out of it.

                    let ptr = match variant {
                        // imports into guests means it's a wasm module
                        // calling an imported function. We supplied the
                        // return pointer as the last argument (saved in
                        // `self.return_pointer`) so we use that to read
                        // the result of the function from memory.
                        AbiVariant::GuestImport => {
                            assert!(sig.results.is_empty());
                            self.return_pointer.take().unwrap()
                        }

                        // guest exports means that this is a host
                        // calling wasm so wasm returned a pointer to where
                        // the result is stored
                        AbiVariant::GuestExport => self.stack.pop().unwrap(),

                        AbiVariant::GuestImportAsync
                        | AbiVariant::GuestExportAsync
                        | AbiVariant::GuestExportAsyncStackful => {
                            unreachable!()
                        }
                    };

                    if let (AbiVariant::GuestExport, true) = (variant, async_) {
                        // If we're dealing with an async function, the result should not be read from memory
                        // immediately, as it's the async call result
                        //
                        // We can leave the result of the call (the indication of what to do as an async call)
                        // on the stack as a return
                        self.stack.push(ptr);
                    } else {
                        // If we're not dealing with an async call, the result must be in memory at this point and can be read out
                        self.read_results_from_memory(
                            &func.result,
                            ptr.clone(),
                            ArchitectureSize::default(),
                        );
                        self.emit(&Instruction::Flush {
                            amt: usize::from(func.result.is_some()),
                        });
                    }
                } else {
                    // With no return pointer in use we can simply lift the
                    // result(s) of the function from the result of the core
                    // wasm function.
                    if let Some(ty) = &func.result {
                        self.lift(ty)
                    }
                }

                // Emit the function return
                self.emit(&Instruction::Return {
                    func,
                    amt: usize::from(func.result.is_some()),
                });
            }

            LiftLower::LiftArgsLowerResults => {
                let max_flat_params = match (variant, async_) {
                    (AbiVariant::GuestImportAsync, _is_async @ true) => MAX_FLAT_ASYNC_PARAMS,
                    _ => MAX_FLAT_PARAMS,
                };

                // Read parameters from memory
                let read_from_memory = |self_: &mut Self| {
                    let mut offset = ArchitectureSize::default();
                    let ptr = self_
                        .stack
                        .pop()
                        .expect("empty stack during read param from memory");
                    for (_, ty) in func.params.iter() {
                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
                        self_.read_from_memory(ty, ptr.clone(), offset);
                        offset += self_.bindgen.sizes().size(ty);
                    }
                };

                // Resolve parameters
                if sig.indirect_params {
                    // If parameters were passed indirectly, arguments must be
                    // read in succession from memory, with the pointer to the arguments
                    // being the first argument to the function.
                    self.emit(&Instruction::GetArg { nth: 0 });
                    read_from_memory(self);
                } else {
                    // ... otherwise, if parameters were passed directly then we lift each
                    // argument in succession from the component wasm types that
                    // make up the type.
                    let mut offset = 0;
                    for (param_name, ty) in func.params.iter() {
                        let Some(types) = flat_types(self.resolve, ty, Some(max_flat_params))
                        else {
                            panic!(
                                "failed to flatten types during direct parameter lifting ('{param_name}' in func '{}')",
                                func.name
                            );
                        };
                        for _ in 0..types.len() {
                            self.emit(&Instruction::GetArg { nth: offset });
                            offset += 1;
                        }
                        self.lift(ty);
                    }
                }

                // ... and that allows us to call the interface types function
                self.emit(&Instruction::CallInterface { func, async_ });

                // The return value of an async function is *not* the result of the function
                // itself or a pointer but rather a status code.
                //
                // Asynchronous functions will call `task.return` after the
                // interface function completes, so lowering is conditional
                // based on slightly different logic for the `task.return`
                // intrinsic.
                //
                // Note that in the async import case the code below deals with the CM function being lowered,
                // not the core function that is underneath it (i.e. func.result may be empty,
                // where the associated core function underneath must have an i32 status code result)
                let (lower_to_memory, async_flat_results) = match (async_, &func.result) {
                    // All async cases pass along the function results and flatten where necessary
                    (_is_async @ true, func_result) => {
                        let results = match &func_result {
                            Some(ty) => flat_types(self.resolve, ty, Some(max_flat_params)),
                            None => Some(Vec::new()),
                        };
                        (results.is_none(), Some(results))
                    }
                    // All other non-async cases
                    (_is_async @ false, _) => (sig.retptr, None),
                };

                // This was dynamically allocated by the caller (or async start
                // function) so after it's been read by the guest we need to
                // deallocate it.
                if let AbiVariant::GuestExport
                | AbiVariant::GuestExportAsync
                | AbiVariant::GuestExportAsyncStackful = variant
                {
                    if sig.indirect_params && !async_ {
                        let ElementInfo { size, align } = self
                            .bindgen
                            .sizes()
                            .record(func.params.iter().map(|t| &t.1));
                        self.emit(&Instruction::GetArg { nth: 0 });
                        self.emit(&Instruction::GuestDeallocate { size, align });
                    }
                }

                self.realloc = Some(realloc);

                // Perform memory lowering of relevant results, including out-pointers as well as traditional results
                match (lower_to_memory, sig.retptr, variant) {
                    // If no lowering to memory is required then we simply
                    // lower the result(s) and return them directly from the function.
                    (_lower_to_memory @ false, _, _) => {
                        if let Some(ty) = &func.result {
                            self.lower(ty);
                        }
                    }

                    // Lowering to memory for a guest import
                    //
                    // When a function is imported to a guest this means
                    // it's a host providing the implementation of the
                    // import. The result is stored in the pointer
                    // specified in the last argument, so we get the
                    // pointer here and then write the return value into
                    // it.
                    (
                        _lower_to_memory @ true,
                        _has_ret_ptr @ true,
                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync,
                    ) => {
                        self.emit(&Instruction::GetArg {
                            nth: sig.params.len() - 1,
                        });
                        let ptr = self
                            .stack
                            .pop()
                            .expect("empty stack during result lower to memory");
                        self.write_params_to_memory(&func.result, ptr, Default::default());
                    }

                    // Lowering to memory for a guest export
                    //
                    // For a guest export this is a function defined in
                    // wasm, so we're returning a pointer to where the
                    // value was stored. Allocate some space here
                    // (statically) and then write the result into that
                    // memory, returning the pointer at the end.
                    (_lower_to_memory @ true, _, variant) => match variant {
                        AbiVariant::GuestExport | AbiVariant::GuestExportAsync => {
                            let ElementInfo { size, align } =
                                self.bindgen.sizes().params(&func.result);
                            let ptr = self.bindgen.return_pointer(size, align);
                            self.write_params_to_memory(
                                &func.result,
                                ptr.clone(),
                                Default::default(),
                            );
                            self.stack.push(ptr);
                        }
                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync => {
                            unreachable!(
                                "lowering to memory cannot be performed without a return pointer ({async_note} func [{func_name}], variant {variant:#?})",
                                async_note = async_.then_some("async").unwrap_or("sync"),
                                func_name = func.name,
                            )
                        }
                        AbiVariant::GuestExportAsyncStackful => {
                            todo!("stackful exports are not yet supported")
                        }
1264                    },
1265                }
1266
1267                // Build and emit the appropriate return
1268                match (variant, async_flat_results) {
1269                    // Async guest imports always return a i32 status code
1270                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, None) if async_ => {
1271                        unreachable!("async guest imports must have a return")
1272                    }
1273
1274                    // Async guest imports with results return the status code, not a pointer to any results
1275                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, Some(results))
1276                        if async_ =>
1277                    {
1278                        let name = &format!("[task-return]{}", func.name);
1279                        let params = results.as_deref().unwrap_or_default();
1280                        self.emit(&Instruction::AsyncTaskReturn { name, params });
1281                    }
1282
1283                    // All async/non-async cases with results that need to be returned
1284                    //
1285                    // In practice, async imports should not end up here, as the returned result of an
1286                    // async import is *not* a pointer but instead a status code.
1287                    (_, Some(results)) => {
1288                        let name = &format!("[task-return]{}", func.name);
1289                        let params = results.as_deref().unwrap_or(&[WasmType::Pointer]);
1290                        self.emit(&Instruction::AsyncTaskReturn { name, params });
1291                    }
1292
1293                    // All async/non-async cases with no results
1294                    (_, None) => {
1295                        if async_ {
1296                            let name = &format!("[task-return]{}", func.name);
1297                            self.emit(&Instruction::AsyncTaskReturn {
1298                                name,
1299                                params: if sig.results.len() > MAX_FLAT_ASYNC_PARAMS {
1300                                    &[WasmType::Pointer]
1301                                } else {
1302                                    &sig.results
1303                                },
1304                            });
1305                        } else {
1306                            self.emit(&Instruction::Return {
1307                                func,
1308                                amt: sig.results.len(),
1309                            });
1310                        }
1311                    }
1312                }
1313
1314                self.realloc = None;
1315            }
1316        }
1317
1318        assert!(self.realloc.is_none());
1319
1320        assert!(
1321            self.stack.is_empty(),
1322            "stack has {} items remaining: {:?}",
1323            self.stack.len(),
1324            self.stack,
1325        );
1326    }
1327
1328    fn post_return(&mut self, func: &Function) {
1329        let sig = self.resolve.wasm_signature(AbiVariant::GuestExport, func);
1330
1331        // Currently post-return is only used for lists, and lists are always
1332        // returned indirectly through memory because their flat representation
1333        // has more than one type. Assert that a return pointer is used, though,
1334        // in case this ever changes.
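        //
        // Concretely (illustrative): for an export returning `list<string>`,
        // the code below walks the list rooted at the return pointer, frees
        // each element's string buffer, and finally frees the list's own
        // allocation.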
1335        assert!(sig.retptr);
1336
1337        self.emit(&Instruction::GetArg { nth: 0 });
1338        let addr = self.stack.pop().unwrap();
1339
1340        let mut types = Vec::new();
1341        types.extend(func.result);
1342        self.deallocate_in_types(&types, &[addr], true, Deallocate::Lists);
1343
1344        self.emit(&Instruction::Return { func, amt: 0 });
1345    }
1346
1347    fn deallocate_in_types(
1348        &mut self,
1349        types: &[Type],
1350        operands: &[B::Operand],
1351        indirect: bool,
1352        what: Deallocate,
1353    ) {
1354        if indirect {
1355            assert_eq!(operands.len(), 1);
1356            for (offset, ty) in self.bindgen.sizes().field_offsets(types) {
1357                self.deallocate_indirect(ty, operands[0].clone(), offset, what);
1358            }
1359            assert!(
1360                self.stack.is_empty(),
1361                "stack has {} items remaining",
1362                self.stack.len()
1363            );
1364        } else {
1365            let mut operands = operands;
1366            let mut operands_for_ty;
1367            for ty in types {
1368                let types = flat_types(self.resolve, ty, None).unwrap();
1369                (operands_for_ty, operands) = operands.split_at(types.len());
1370                self.stack.extend_from_slice(operands_for_ty);
1371                self.deallocate(ty, what);
1372                assert!(
1373                    self.stack.is_empty(),
1374                    "stack has {} items remaining",
1375                    self.stack.len()
1376                );
1377            }
1378            assert!(operands.is_empty());
1379        }
1380    }
1381
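    /// Drives a single instruction through the backend: pops
    /// `operands_len()` operands off `self.stack`, hands them to the
    /// backend's `emit` hook, and pushes the backend's `results_len()`
    /// results back onto the stack, asserting that both counts line up.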
1382    fn emit(&mut self, inst: &Instruction<'_>) {
1383        self.operands.clear();
1384        self.results.clear();
1385
1386        let operands_len = inst.operands_len();
1387        assert!(
1388            self.stack.len() >= operands_len,
1389            "not enough operands on stack for {:?}: have {} need {operands_len}",
1390            inst,
1391            self.stack.len(),
1392        );
1393        self.operands
1394            .extend(self.stack.drain((self.stack.len() - operands_len)..));
1395        self.results.reserve(inst.results_len());
1396
1397        self.bindgen
1398            .emit(self.resolve, inst, &mut self.operands, &mut self.results);
1399
1400        assert_eq!(
1401            self.results.len(),
1402            inst.results_len(),
1403            "{:?} expected {} results, got {}",
1404            inst,
1405            inst.results_len(),
1406            self.results.len()
1407        );
1408        self.stack.append(&mut self.results);
1409    }
1410
1411    fn push_block(&mut self) {
1412        self.bindgen.push_block();
1413    }
1414
1415    fn finish_block(&mut self, size: usize) {
1416        self.operands.clear();
1417        assert!(
1418            size <= self.stack.len(),
1419            "not enough operands on stack for finishing block",
1420        );
1421        self.operands
1422            .extend(self.stack.drain((self.stack.len() - size)..));
1423        self.bindgen.finish_block(&mut self.operands);
1424    }
1425
1426    fn lower(&mut self, ty: &Type) {
1427        use Instruction::*;
1428
1429        match *ty {
1430            Type::Bool => self.emit(&I32FromBool),
1431            Type::S8 => self.emit(&I32FromS8),
1432            Type::U8 => self.emit(&I32FromU8),
1433            Type::S16 => self.emit(&I32FromS16),
1434            Type::U16 => self.emit(&I32FromU16),
1435            Type::S32 => self.emit(&I32FromS32),
1436            Type::U32 => self.emit(&I32FromU32),
1437            Type::S64 => self.emit(&I64FromS64),
1438            Type::U64 => self.emit(&I64FromU64),
1439            Type::Char => self.emit(&I32FromChar),
1440            Type::F32 => self.emit(&CoreF32FromF32),
1441            Type::F64 => self.emit(&CoreF64FromF64),
1442            Type::String => {
1443                let realloc = self.list_realloc();
1444                self.emit(&StringLower { realloc });
1445            }
1446            Type::ErrorContext => self.emit(&ErrorContextLower),
1447            Type::Id(id) => match &self.resolve.types[id].kind {
1448                TypeDefKind::Type(t) => self.lower(t),
1449                TypeDefKind::List(element) => {
1450                    let realloc = self.list_realloc();
1451                    if self.bindgen.is_list_canonical(self.resolve, element) {
1452                        self.emit(&ListCanonLower { element, realloc });
1453                    } else {
1454                        self.push_block();
1455                        self.emit(&IterElem { element });
1456                        self.emit(&IterBasePointer);
1457                        let addr = self.stack.pop().unwrap();
1458                        self.write_to_memory(element, addr, Default::default());
1459                        self.finish_block(0);
1460                        self.emit(&ListLower { element, realloc });
1461                    }
1462                }
1463                TypeDefKind::Handle(handle) => {
1464                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
1465                    self.emit(&HandleLower {
1466                        handle,
1467                        ty: id,
1468                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
1469                    });
1470                }
1471                TypeDefKind::Resource => {
1472                    todo!();
1473                }
1474                TypeDefKind::Record(record) => {
1475                    self.emit(&RecordLower {
1476                        record,
1477                        ty: id,
1478                        name: self.resolve.types[id].name.as_deref().unwrap(),
1479                    });
1480                    let values = self
1481                        .stack
1482                        .drain(self.stack.len() - record.fields.len()..)
1483                        .collect::<Vec<_>>();
1484                    for (field, value) in record.fields.iter().zip(values) {
1485                        self.stack.push(value);
1486                        self.lower(&field.ty);
1487                    }
1488                }
1489                TypeDefKind::Tuple(tuple) => {
1490                    self.emit(&TupleLower { tuple, ty: id });
1491                    let values = self
1492                        .stack
1493                        .drain(self.stack.len() - tuple.types.len()..)
1494                        .collect::<Vec<_>>();
1495                    for (ty, value) in tuple.types.iter().zip(values) {
1496                        self.stack.push(value);
1497                        self.lower(ty);
1498                    }
1499                }
1500
1501                TypeDefKind::Flags(flags) => {
1502                    self.emit(&FlagsLower {
1503                        flags,
1504                        ty: id,
1505                        name: self.resolve.types[id].name.as_ref().unwrap(),
1506                    });
1507                }
1508
1509                TypeDefKind::Variant(v) => {
1510                    let results =
1511                        self.lower_variant_arms(ty, v.cases.iter().map(|c| c.ty.as_ref()));
1512                    self.emit(&VariantLower {
1513                        variant: v,
1514                        ty: id,
1515                        results: &results,
1516                        name: self.resolve.types[id].name.as_deref().unwrap(),
1517                    });
1518                }
1519                TypeDefKind::Enum(enum_) => {
1520                    self.emit(&EnumLower {
1521                        enum_,
1522                        ty: id,
1523                        name: self.resolve.types[id].name.as_deref().unwrap(),
1524                    });
1525                }
1526                TypeDefKind::Option(t) => {
1527                    let results = self.lower_variant_arms(ty, [None, Some(t)]);
1528                    self.emit(&OptionLower {
1529                        payload: t,
1530                        ty: id,
1531                        results: &results,
1532                    });
1533                }
1534                TypeDefKind::Result(r) => {
1535                    let results = self.lower_variant_arms(ty, [r.ok.as_ref(), r.err.as_ref()]);
1536                    self.emit(&ResultLower {
1537                        result: r,
1538                        ty: id,
1539                        results: &results,
1540                    });
1541                }
1542                TypeDefKind::Future(ty) => {
1543                    self.emit(&FutureLower {
1544                        payload: ty,
1545                        ty: id,
1546                    });
1547                }
1548                TypeDefKind::Stream(ty) => {
1549                    self.emit(&StreamLower {
1550                        payload: ty,
1551                        ty: id,
1552                    });
1553                }
1554                TypeDefKind::Unknown => unreachable!(),
1555                TypeDefKind::FixedSizeList(..) => todo!(),
1556            },
1557        }
1558    }
1559
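    /// Lowers each arm of a variant-like type into the flat core types shared
    /// by all arms, bitcasting and zero-padding as needed so that every block
    /// yields the same set of wasm types.
    ///
    /// Illustrative example (assuming the canonical ABI's join rules): a WIT
    /// `variant { a(f32), b(u64) }` flattens to `[I32, I64]`, so the `a` arm
    /// bitcasts its `f32` payload to `i64`, while a payload-less case (such as
    /// `none` in an `option`) would push a constant zero for the payload slot.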
1560    fn lower_variant_arms<'b>(
1561        &mut self,
1562        ty: &Type,
1563        cases: impl IntoIterator<Item = Option<&'b Type>>,
1564    ) -> Vec<WasmType> {
1565        use Instruction::*;
1566        let results = flat_types(self.resolve, ty, None).unwrap();
1567        let mut casts = Vec::new();
1568        for (i, ty) in cases.into_iter().enumerate() {
1569            self.push_block();
1570            self.emit(&VariantPayloadName);
1571            let payload_name = self.stack.pop().unwrap();
1572            self.emit(&I32Const { val: i as i32 });
1573            let mut pushed = 1;
1574            if let Some(ty) = ty {
1575                // Using the payload of this block we lower the type to
1576                // raw wasm values.
1577                self.stack.push(payload_name);
1578                self.lower(ty);
1579
1580                // Determine the types of all the wasm values we just
1581                // pushed, and record how many. If we pushed too few
1582                // then we'll need to push some zeros after this.
1583                let temp = flat_types(self.resolve, ty, None).unwrap();
1584                pushed += temp.len();
1585
1586                // For all the types pushed we may need to insert some
1587                // bitcasts. This will go through and cast everything
1588                // to the right type to ensure all blocks produce the
1589                // same set of results.
1590                casts.truncate(0);
1591                for (actual, expected) in temp.iter().zip(&results[1..]) {
1592                    casts.push(cast(*actual, *expected));
1593                }
1594                if casts.iter().any(|c| *c != Bitcast::None) {
1595                    self.emit(&Bitcasts { casts: &casts });
1596                }
1597            }
1598
1599            // If we haven't pushed enough items in this block to match
1600            // what other variants are pushing then we need to push
1601            // some zeros.
1602            if pushed < results.len() {
1603                self.emit(&ConstZero {
1604                    tys: &results[pushed..],
1605                });
1606            }
1607            self.finish_block(results.len());
1608        }
1609        results
1610    }
1611
1612    fn list_realloc(&self) -> Option<&'static str> {
1613        match self.realloc.expect("realloc should be configured") {
1614            Realloc::None => None,
1615            Realloc::Export(s) => Some(s),
1616        }
1617    }
1618
1619    /// Note that in general everything in this function is the opposite of the
1620    /// `lower` function above. This is intentional and should be kept this way!
1621    fn lift(&mut self, ty: &Type) {
1622        use Instruction::*;
1623
1624        match *ty {
1625            Type::Bool => self.emit(&BoolFromI32),
1626            Type::S8 => self.emit(&S8FromI32),
1627            Type::U8 => self.emit(&U8FromI32),
1628            Type::S16 => self.emit(&S16FromI32),
1629            Type::U16 => self.emit(&U16FromI32),
1630            Type::S32 => self.emit(&S32FromI32),
1631            Type::U32 => self.emit(&U32FromI32),
1632            Type::S64 => self.emit(&S64FromI64),
1633            Type::U64 => self.emit(&U64FromI64),
1634            Type::Char => self.emit(&CharFromI32),
1635            Type::F32 => self.emit(&F32FromCoreF32),
1636            Type::F64 => self.emit(&F64FromCoreF64),
1637            Type::String => self.emit(&StringLift),
1638            Type::ErrorContext => self.emit(&ErrorContextLift),
1639            Type::Id(id) => match &self.resolve.types[id].kind {
1640                TypeDefKind::Type(t) => self.lift(t),
1641                TypeDefKind::List(element) => {
1642                    if self.bindgen.is_list_canonical(self.resolve, element) {
1643                        self.emit(&ListCanonLift { element, ty: id });
1644                    } else {
1645                        self.push_block();
1646                        self.emit(&IterBasePointer);
1647                        let addr = self.stack.pop().unwrap();
1648                        self.read_from_memory(element, addr, Default::default());
1649                        self.finish_block(1);
1650                        self.emit(&ListLift { element, ty: id });
1651                    }
1652                }
1653                TypeDefKind::Handle(handle) => {
1654                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
1655                    self.emit(&HandleLift {
1656                        handle,
1657                        ty: id,
1658                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
1659                    });
1660                }
1661                TypeDefKind::Resource => {
1662                    todo!();
1663                }
1664                TypeDefKind::Record(record) => {
1665                    self.flat_for_each_record_type(
1666                        ty,
1667                        record.fields.iter().map(|f| &f.ty),
1668                        Self::lift,
1669                    );
1670                    self.emit(&RecordLift {
1671                        record,
1672                        ty: id,
1673                        name: self.resolve.types[id].name.as_deref().unwrap(),
1674                    });
1675                }
1676                TypeDefKind::Tuple(tuple) => {
1677                    self.flat_for_each_record_type(ty, tuple.types.iter(), Self::lift);
1678                    self.emit(&TupleLift { tuple, ty: id });
1679                }
1680                TypeDefKind::Flags(flags) => {
1681                    self.emit(&FlagsLift {
1682                        flags,
1683                        ty: id,
1684                        name: self.resolve.types[id].name.as_ref().unwrap(),
1685                    });
1686                }
1687
1688                TypeDefKind::Variant(v) => {
1689                    self.flat_for_each_variant_arm(
1690                        ty,
1691                        true,
1692                        v.cases.iter().map(|c| c.ty.as_ref()),
1693                        Self::lift,
1694                    );
1695                    self.emit(&VariantLift {
1696                        variant: v,
1697                        ty: id,
1698                        name: self.resolve.types[id].name.as_deref().unwrap(),
1699                    });
1700                }
1701
1702                TypeDefKind::Enum(enum_) => {
1703                    self.emit(&EnumLift {
1704                        enum_,
1705                        ty: id,
1706                        name: self.resolve.types[id].name.as_deref().unwrap(),
1707                    });
1708                }
1709
1710                TypeDefKind::Option(t) => {
1711                    self.flat_for_each_variant_arm(ty, true, [None, Some(t)], Self::lift);
1712                    self.emit(&OptionLift { payload: t, ty: id });
1713                }
1714
1715                TypeDefKind::Result(r) => {
1716                    self.flat_for_each_variant_arm(
1717                        ty,
1718                        true,
1719                        [r.ok.as_ref(), r.err.as_ref()],
1720                        Self::lift,
1721                    );
1722                    self.emit(&ResultLift { result: r, ty: id });
1723                }
1724
1725                TypeDefKind::Future(ty) => {
1726                    self.emit(&FutureLift {
1727                        payload: ty,
1728                        ty: id,
1729                    });
1730                }
1731                TypeDefKind::Stream(ty) => {
1732                    self.emit(&StreamLift {
1733                        payload: ty,
1734                        ty: id,
1735                    });
1736                }
1737                TypeDefKind::Unknown => unreachable!(),
1738                TypeDefKind::FixedSizeList(..) => todo!(),
1739            },
1740        }
1741    }
1742
1743    fn flat_for_each_record_type<'b>(
1744        &mut self,
1745        container: &Type,
1746        types: impl Iterator<Item = &'b Type>,
1747        mut iter: impl FnMut(&mut Self, &Type),
1748    ) {
1749        let temp = flat_types(self.resolve, container, None).unwrap();
1750        let mut args = self
1751            .stack
1752            .drain(self.stack.len() - temp.len()..)
1753            .collect::<Vec<_>>();
1754        for ty in types {
1755            let temp = flat_types(self.resolve, ty, None).unwrap();
1756            self.stack.extend(args.drain(..temp.len()));
1757            iter(self, ty);
1758        }
1759    }
1760
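    /// Splits the flat operands of a variant-like value back out per arm: each
    /// case gets its own block seeded with only the operands its payload
    /// needs, bitcast back from the joined types, and `iter` is invoked on
    /// that payload (used for both lifting and deallocation).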
1761    fn flat_for_each_variant_arm<'b>(
1762        &mut self,
1763        ty: &Type,
1764        blocks_with_type_have_result: bool,
1765        cases: impl IntoIterator<Item = Option<&'b Type>>,
1766        mut iter: impl FnMut(&mut Self, &Type),
1767    ) {
1768        let params = flat_types(self.resolve, ty, None).unwrap();
1769        let mut casts = Vec::new();
1770        let block_inputs = self
1771            .stack
1772            .drain(self.stack.len() + 1 - params.len()..)
1773            .collect::<Vec<_>>();
1774        for ty in cases {
1775            self.push_block();
1776            if let Some(ty) = ty {
1777                // Push only the values we need for this variant onto
1778                // the stack.
1779                let temp = flat_types(self.resolve, ty, None).unwrap();
1780                self.stack
1781                    .extend(block_inputs[..temp.len()].iter().cloned());
1782
1783                // Cast all the types we have on the stack to the actual
1784                // types needed for this variant, if necessary.
1785                casts.truncate(0);
1786                for (actual, expected) in temp.iter().zip(&params[1..]) {
1787                    casts.push(cast(*expected, *actual));
1788                }
1789                if casts.iter().any(|c| *c != Bitcast::None) {
1790                    self.emit(&Instruction::Bitcasts { casts: &casts });
1791                }
1792
1793                // Then recursively process this case's payload via `iter`.
1794                iter(self, ty);
1795            }
1796            self.finish_block(if blocks_with_type_have_result {
1797                ty.is_some() as usize
1798            } else {
1799                0
1800            });
1801        }
1802    }
1803
1804    fn write_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
1805        use Instruction::*;
1806
1807        match *ty {
1808            // Builtin types need different flavors of storage instructions
1809            // depending on the size of the value written.
1810            Type::Bool | Type::U8 | Type::S8 => {
1811                self.lower_and_emit(ty, addr, &I32Store8 { offset })
1812            }
1813            Type::U16 | Type::S16 => self.lower_and_emit(ty, addr, &I32Store16 { offset }),
1814            Type::U32 | Type::S32 | Type::Char => {
1815                self.lower_and_emit(ty, addr, &I32Store { offset })
1816            }
1817            Type::U64 | Type::S64 => self.lower_and_emit(ty, addr, &I64Store { offset }),
1818            Type::F32 => self.lower_and_emit(ty, addr, &F32Store { offset }),
1819            Type::F64 => self.lower_and_emit(ty, addr, &F64Store { offset }),
1820            Type::String => self.write_list_to_memory(ty, addr, offset),
1821            Type::ErrorContext => self.lower_and_emit(ty, addr, &I32Store { offset }),
1822
1823            Type::Id(id) => match &self.resolve.types[id].kind {
1824                TypeDefKind::Type(t) => self.write_to_memory(t, addr, offset),
1825                TypeDefKind::List(_) => self.write_list_to_memory(ty, addr, offset),
1826
1827                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
1828                    self.lower_and_emit(ty, addr, &I32Store { offset })
1829                }
1830
1831                // Decompose the record into its components and then write all
1832                // the components into memory one-by-one.
1833                TypeDefKind::Record(record) => {
1834                    self.emit(&RecordLower {
1835                        record,
1836                        ty: id,
1837                        name: self.resolve.types[id].name.as_deref().unwrap(),
1838                    });
1839                    self.write_fields_to_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
1840                }
1841                TypeDefKind::Resource => {
1842                    todo!()
1843                }
1844                TypeDefKind::Tuple(tuple) => {
1845                    self.emit(&TupleLower { tuple, ty: id });
1846                    self.write_fields_to_memory(tuple.types.iter(), addr, offset);
1847                }
1848
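                // Flags are stored according to their representation: up to 8
                // members fit in one byte, up to 16 in two bytes, and larger
                // sets are stored as one 32-bit word per group of 32 members.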
1849                TypeDefKind::Flags(f) => {
1850                    self.lower(ty);
1851                    match f.repr() {
1852                        FlagsRepr::U8 => {
1853                            self.stack.push(addr);
1854                            self.store_intrepr(offset, Int::U8);
1855                        }
1856                        FlagsRepr::U16 => {
1857                            self.stack.push(addr);
1858                            self.store_intrepr(offset, Int::U16);
1859                        }
1860                        FlagsRepr::U32(n) => {
1861                            for i in (0..n).rev() {
1862                                self.stack.push(addr.clone());
1863                                self.emit(&I32Store {
1864                                    offset: offset.add_bytes(i * 4),
1865                                });
1866                            }
1867                        }
1868                    }
1869                }
1870
1871                // Each case gets its own block, and the first thing each
1872                // case does is write the discriminant. After that, if the
1873                // case has a payload, it is written after the discriminant,
1874                // aligned up to the payload's alignment.
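                //
                // Illustrative layout (wasm32): `result<u32, string>` stores a
                // one-byte discriminant at offset 0 and its payload at offset
                // 4, so the `ok` case writes a `u32` at offset 4 while the
                // `err` case writes the string's (ptr, len) pair at offsets 4
                // and 8.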
1875                TypeDefKind::Variant(v) => {
1876                    self.write_variant_arms_to_memory(
1877                        offset,
1878                        addr,
1879                        v.tag(),
1880                        v.cases.iter().map(|c| c.ty.as_ref()),
1881                    );
1882                    self.emit(&VariantLower {
1883                        variant: v,
1884                        ty: id,
1885                        results: &[],
1886                        name: self.resolve.types[id].name.as_deref().unwrap(),
1887                    });
1888                }
1889
1890                TypeDefKind::Option(t) => {
1891                    self.write_variant_arms_to_memory(offset, addr, Int::U8, [None, Some(t)]);
1892                    self.emit(&OptionLower {
1893                        payload: t,
1894                        ty: id,
1895                        results: &[],
1896                    });
1897                }
1898
1899                TypeDefKind::Result(r) => {
1900                    self.write_variant_arms_to_memory(
1901                        offset,
1902                        addr,
1903                        Int::U8,
1904                        [r.ok.as_ref(), r.err.as_ref()],
1905                    );
1906                    self.emit(&ResultLower {
1907                        result: r,
1908                        ty: id,
1909                        results: &[],
1910                    });
1911                }
1912
1913                TypeDefKind::Enum(e) => {
1914                    self.lower(ty);
1915                    self.stack.push(addr);
1916                    self.store_intrepr(offset, e.tag());
1917                }
1918
1919                TypeDefKind::Unknown => unreachable!(),
1920                TypeDefKind::FixedSizeList(..) => todo!(),
1921            },
1922        }
1923    }
1924
1925    fn write_params_to_memory<'b>(
1926        &mut self,
1927        params: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
1928        addr: B::Operand,
1929        offset: ArchitectureSize,
1930    ) {
1931        self.write_fields_to_memory(params, addr, offset);
1932    }
1933
1934    fn write_variant_arms_to_memory<'b>(
1935        &mut self,
1936        offset: ArchitectureSize,
1937        addr: B::Operand,
1938        tag: Int,
1939        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
1940    ) {
1941        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
1942        for (i, ty) in cases.into_iter().enumerate() {
1943            self.push_block();
1944            self.emit(&Instruction::VariantPayloadName);
1945            let payload_name = self.stack.pop().unwrap();
1946            self.emit(&Instruction::I32Const { val: i as i32 });
1947            self.stack.push(addr.clone());
1948            self.store_intrepr(offset, tag);
1949            if let Some(ty) = ty {
1950                self.stack.push(payload_name.clone());
1951                self.write_to_memory(ty, addr.clone(), payload_offset);
1952            }
1953            self.finish_block(0);
1954        }
1955    }
1956
1957    fn write_list_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
1958        // After lowering the list there are two values on the stack (a
1959        // pointer and a length) which we write into memory, storing the
1960        // pointer at the lower address and the length at the higher address.
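        // On wasm32 that puts the pointer at `offset + 0` and the length at
        // `offset + 4`; using `sizes().align(ty)` keeps the length offset
        // correct for other pointer widths.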
1961        self.lower(ty);
1962        self.stack.push(addr.clone());
1963        self.emit(&Instruction::LengthStore {
1964            offset: offset + self.bindgen.sizes().align(ty).into(),
1965        });
1966        self.stack.push(addr);
1967        self.emit(&Instruction::PointerStore { offset });
1968    }
1969
1970    fn write_fields_to_memory<'b>(
1971        &mut self,
1972        tys: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
1973        addr: B::Operand,
1974        offset: ArchitectureSize,
1975    ) {
1976        let tys = tys.into_iter();
1977        let fields = self
1978            .stack
1979            .drain(self.stack.len() - tys.len()..)
1980            .collect::<Vec<_>>();
1981        for ((field_offset, ty), op) in self
1982            .bindgen
1983            .sizes()
1984            .field_offsets(tys)
1985            .into_iter()
1986            .zip(fields)
1987        {
1988            self.stack.push(op);
1989            self.write_to_memory(ty, addr.clone(), offset + (field_offset));
1990        }
1991    }
1992
1993    fn lower_and_emit(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
1994        self.lower(ty);
1995        self.stack.push(addr);
1996        self.emit(instr);
1997    }
1998
1999    fn read_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
2000        use Instruction::*;
2001
2002        match *ty {
2003            Type::Bool => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
2004            Type::U8 => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
2005            Type::S8 => self.emit_and_lift(ty, addr, &I32Load8S { offset }),
2006            Type::U16 => self.emit_and_lift(ty, addr, &I32Load16U { offset }),
2007            Type::S16 => self.emit_and_lift(ty, addr, &I32Load16S { offset }),
2008            Type::U32 | Type::S32 | Type::Char => self.emit_and_lift(ty, addr, &I32Load { offset }),
2009            Type::U64 | Type::S64 => self.emit_and_lift(ty, addr, &I64Load { offset }),
2010            Type::F32 => self.emit_and_lift(ty, addr, &F32Load { offset }),
2011            Type::F64 => self.emit_and_lift(ty, addr, &F64Load { offset }),
2012            Type::String => self.read_list_from_memory(ty, addr, offset),
2013            Type::ErrorContext => self.emit_and_lift(ty, addr, &I32Load { offset }),
2014
2015            Type::Id(id) => match &self.resolve.types[id].kind {
2016                TypeDefKind::Type(t) => self.read_from_memory(t, addr, offset),
2017
2018                TypeDefKind::List(_) => self.read_list_from_memory(ty, addr, offset),
2019
2020                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
2021                    self.emit_and_lift(ty, addr, &I32Load { offset })
2022                }
2023
2024                TypeDefKind::Resource => {
2025                    todo!();
2026                }
2027
2028                // Read and lift each field individually, adjusting the offset
2029                // as we go along, then aggregate all the fields into the
2030                // record.
2031                TypeDefKind::Record(record) => {
2032                    self.read_fields_from_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
2033                    self.emit(&RecordLift {
2034                        record,
2035                        ty: id,
2036                        name: self.resolve.types[id].name.as_deref().unwrap(),
2037                    });
2038                }
2039
2040                TypeDefKind::Tuple(tuple) => {
2041                    self.read_fields_from_memory(&tuple.types, addr, offset);
2042                    self.emit(&TupleLift { tuple, ty: id });
2043                }
2044
2045                TypeDefKind::Flags(f) => {
2046                    match f.repr() {
2047                        FlagsRepr::U8 => {
2048                            self.stack.push(addr);
2049                            self.load_intrepr(offset, Int::U8);
2050                        }
2051                        FlagsRepr::U16 => {
2052                            self.stack.push(addr);
2053                            self.load_intrepr(offset, Int::U16);
2054                        }
2055                        FlagsRepr::U32(n) => {
2056                            for i in 0..n {
2057                                self.stack.push(addr.clone());
2058                                self.emit(&I32Load {
2059                                    offset: offset.add_bytes(i * 4),
2060                                });
2061                            }
2062                        }
2063                    }
2064                    self.lift(ty);
2065                }
2066
2067                // Each case gets its own block, and we dispatch to the
2068                // right block based on the discriminant loaded up front.
2069                // Each individual block simply reads the payload from its
2070                // offset, if the case has one.
2071                TypeDefKind::Variant(variant) => {
2072                    self.read_variant_arms_from_memory(
2073                        offset,
2074                        addr,
2075                        variant.tag(),
2076                        variant.cases.iter().map(|c| c.ty.as_ref()),
2077                    );
2078                    self.emit(&VariantLift {
2079                        variant,
2080                        ty: id,
2081                        name: self.resolve.types[id].name.as_deref().unwrap(),
2082                    });
2083                }
2084
2085                TypeDefKind::Option(t) => {
2086                    self.read_variant_arms_from_memory(offset, addr, Int::U8, [None, Some(t)]);
2087                    self.emit(&OptionLift { payload: t, ty: id });
2088                }
2089
2090                TypeDefKind::Result(r) => {
2091                    self.read_variant_arms_from_memory(
2092                        offset,
2093                        addr,
2094                        Int::U8,
2095                        [r.ok.as_ref(), r.err.as_ref()],
2096                    );
2097                    self.emit(&ResultLift { result: r, ty: id });
2098                }
2099
2100                TypeDefKind::Enum(e) => {
2101                    self.stack.push(addr.clone());
2102                    self.load_intrepr(offset, e.tag());
2103                    self.lift(ty);
2104                }
2105
2106                TypeDefKind::Unknown => unreachable!(),
2107                TypeDefKind::FixedSizeList(..) => todo!(),
2108            },
2109        }
2110    }
2111
2112    fn read_results_from_memory(
2113        &mut self,
2114        result: &Option<Type>,
2115        addr: B::Operand,
2116        offset: ArchitectureSize,
2117    ) {
2118        self.read_fields_from_memory(result, addr, offset)
2119    }
2120
2121    fn read_variant_arms_from_memory<'b>(
2122        &mut self,
2123        offset: ArchitectureSize,
2124        addr: B::Operand,
2125        tag: Int,
2126        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
2127    ) {
2128        self.stack.push(addr.clone());
2129        self.load_intrepr(offset, tag);
2130        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
2131        for ty in cases {
2132            self.push_block();
2133            if let Some(ty) = ty {
2134                self.read_from_memory(ty, addr.clone(), payload_offset);
2135            }
2136            self.finish_block(ty.is_some() as usize);
2137        }
2138    }
2139
2140    fn read_list_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
2141        // Read the pointer/length pair and then perform the standard
2142        // lifting process.
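        // (This mirrors `write_list_to_memory` above: pointer at the lower
        // address, length right after it at the type's alignment.)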
2143        self.stack.push(addr.clone());
2144        self.emit(&Instruction::PointerLoad { offset });
2145        self.stack.push(addr);
2146        self.emit(&Instruction::LengthLoad {
2147            offset: offset + self.bindgen.sizes().align(ty).into(),
2148        });
2149        self.lift(ty);
2150    }
2151
2152    fn read_fields_from_memory<'b>(
2153        &mut self,
2154        tys: impl IntoIterator<Item = &'b Type>,
2155        addr: B::Operand,
2156        offset: ArchitectureSize,
2157    ) {
2158        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys).iter() {
2159            self.read_from_memory(ty, addr.clone(), offset + (*field_offset));
2160        }
2161    }
2162
2163    fn emit_and_lift(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
2164        self.stack.push(addr);
2165        self.emit(instr);
2166        self.lift(ty);
2167    }
2168
2169    fn load_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2170        self.emit(&match repr {
2171            Int::U64 => Instruction::I64Load { offset },
2172            Int::U32 => Instruction::I32Load { offset },
2173            Int::U16 => Instruction::I32Load16U { offset },
2174            Int::U8 => Instruction::I32Load8U { offset },
2175        });
2176    }
2177
2178    fn store_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2179        self.emit(&match repr {
2180            Int::U64 => Instruction::I64Store { offset },
2181            Int::U32 => Instruction::I32Store { offset },
2182            Int::U16 => Instruction::I32Store16 { offset },
2183            Int::U8 => Instruction::I32Store8 { offset },
2184        });
2185    }
2186
2187    /// Runs the deallocation of `ty` for the operands currently on
2188    /// `self.stack`.
2189    ///
2190    /// This will pop the ABI items of `ty` from `self.stack`.
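    ///
    /// Illustrative example: for a `list<string>` this emits a block that
    /// frees each element's string buffer followed by a deallocation of the
    /// list's own storage, while plain scalars merely have their operands
    /// discarded.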
2191    fn deallocate(&mut self, ty: &Type, what: Deallocate) {
2192        use Instruction::*;
2193
2194        match *ty {
2195            Type::String => {
2196                self.emit(&Instruction::GuestDeallocateString);
2197            }
2198
2199            Type::Bool
2200            | Type::U8
2201            | Type::S8
2202            | Type::U16
2203            | Type::S16
2204            | Type::U32
2205            | Type::S32
2206            | Type::Char
2207            | Type::U64
2208            | Type::S64
2209            | Type::F32
2210            | Type::F64
2211            | Type::ErrorContext => {
2212                // No deallocation necessary, just discard the operand on the
2213                // stack.
2214                self.stack.pop().unwrap();
2215            }
2216
2217            Type::Id(id) => match &self.resolve.types[id].kind {
2218                TypeDefKind::Type(t) => self.deallocate(t, what),
2219
2220                TypeDefKind::List(element) => {
2221                    self.push_block();
2222                    self.emit(&IterBasePointer);
2223                    let elemaddr = self.stack.pop().unwrap();
2224                    self.deallocate_indirect(element, elemaddr, Default::default(), what);
2225                    self.finish_block(0);
2226
2227                    self.emit(&Instruction::GuestDeallocateList { element });
2228                }
2229
2230                TypeDefKind::Handle(Handle::Own(_))
2231                | TypeDefKind::Future(_)
2232                | TypeDefKind::Stream(_)
2233                    if what.handles() =>
2234                {
2235                    self.lift(ty);
2236                    self.emit(&DropHandle { ty });
2237                }
2238
2239                TypeDefKind::Record(record) => {
2240                    self.flat_for_each_record_type(
2241                        ty,
2242                        record.fields.iter().map(|f| &f.ty),
2243                        |me, ty| me.deallocate(ty, what),
2244                    );
2245                }
2246
2247                TypeDefKind::Tuple(tuple) => {
2248                    self.flat_for_each_record_type(ty, tuple.types.iter(), |me, ty| {
2249                        me.deallocate(ty, what)
2250                    });
2251                }
2252
2253                TypeDefKind::Variant(variant) => {
2254                    self.flat_for_each_variant_arm(
2255                        ty,
2256                        false,
2257                        variant.cases.iter().map(|c| c.ty.as_ref()),
2258                        |me, ty| me.deallocate(ty, what),
2259                    );
2260                    self.emit(&GuestDeallocateVariant {
2261                        blocks: variant.cases.len(),
2262                    });
2263                }
2264
2265                TypeDefKind::Option(t) => {
2266                    self.flat_for_each_variant_arm(ty, false, [None, Some(t)], |me, ty| {
2267                        me.deallocate(ty, what)
2268                    });
2269                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2270                }
2271
2272                TypeDefKind::Result(e) => {
2273                    self.flat_for_each_variant_arm(
2274                        ty,
2275                        false,
2276                        [e.ok.as_ref(), e.err.as_ref()],
2277                        |me, ty| me.deallocate(ty, what),
2278                    );
2279                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2280                }
2281
2282                // Discard the operand on the stack; there is otherwise nothing to free.
2283                TypeDefKind::Flags(_)
2284                | TypeDefKind::Enum(_)
2285                | TypeDefKind::Future(_)
2286                | TypeDefKind::Stream(_)
2287                | TypeDefKind::Handle(Handle::Own(_))
2288                | TypeDefKind::Handle(Handle::Borrow(_)) => {
2289                    self.stack.pop().unwrap();
2290                }
2291
2292                TypeDefKind::Resource => unreachable!(),
2293                TypeDefKind::Unknown => unreachable!(),
2294
2295                TypeDefKind::FixedSizeList(..) => todo!(),
2296            },
2297        }
2298    }
2299
2300    fn deallocate_indirect(
2301        &mut self,
2302        ty: &Type,
2303        addr: B::Operand,
2304        offset: ArchitectureSize,
2305        what: Deallocate,
2306    ) {
2307        use Instruction::*;
2308
2309        // No need to execute any instructions if this type itself doesn't
2310        // require any form of deallocation.
2311        if !needs_deallocate(self.resolve, ty, what) {
2312            return;
2313        }
2314
2315        match *ty {
2316            Type::String => {
2317                self.stack.push(addr.clone());
2318                self.emit(&Instruction::PointerLoad { offset });
2319                self.stack.push(addr);
2320                self.emit(&Instruction::LengthLoad {
2321                    offset: offset + self.bindgen.sizes().align(ty).into(),
2322                });
2323                self.deallocate(ty, what);
2324            }
2325
2326            Type::Bool
2327            | Type::U8
2328            | Type::S8
2329            | Type::U16
2330            | Type::S16
2331            | Type::U32
2332            | Type::S32
2333            | Type::Char
2334            | Type::U64
2335            | Type::S64
2336            | Type::F32
2337            | Type::F64
2338            | Type::ErrorContext => {}
2339
2340            Type::Id(id) => match &self.resolve.types[id].kind {
2341                TypeDefKind::Type(t) => self.deallocate_indirect(t, addr, offset, what),
2342
2343                TypeDefKind::List(_) => {
2344                    self.stack.push(addr.clone());
2345                    self.emit(&Instruction::PointerLoad { offset });
2346                    self.stack.push(addr);
2347                    self.emit(&Instruction::LengthLoad {
2348                        offset: offset + self.bindgen.sizes().align(ty).into(),
2349                    });
2350
2351                    self.deallocate(ty, what);
2352                }
2353
2354                TypeDefKind::Handle(Handle::Own(_))
2355                | TypeDefKind::Future(_)
2356                | TypeDefKind::Stream(_)
2357                    if what.handles() =>
2358                {
2359                    self.read_from_memory(ty, addr, offset);
2360                    self.emit(&DropHandle { ty });
2361                }
2362
2363                TypeDefKind::Handle(Handle::Own(_)) => unreachable!(),
2364                TypeDefKind::Handle(Handle::Borrow(_)) => unreachable!(),
2365                TypeDefKind::Resource => unreachable!(),
2366
2367                TypeDefKind::Record(record) => {
2368                    self.deallocate_indirect_fields(
2369                        &record.fields.iter().map(|f| f.ty).collect::<Vec<_>>(),
2370                        addr,
2371                        offset,
2372                        what,
2373                    );
2374                }
2375
2376                TypeDefKind::Tuple(tuple) => {
2377                    self.deallocate_indirect_fields(&tuple.types, addr, offset, what);
2378                }
2379
2380                TypeDefKind::Flags(_) => {}
2381
2382                TypeDefKind::Variant(variant) => {
2383                    self.deallocate_indirect_variant(
2384                        offset,
2385                        addr,
2386                        variant.tag(),
2387                        variant.cases.iter().map(|c| c.ty.as_ref()),
2388                        what,
2389                    );
2390                    self.emit(&GuestDeallocateVariant {
2391                        blocks: variant.cases.len(),
2392                    });
2393                }
2394
2395                TypeDefKind::Option(t) => {
2396                    self.deallocate_indirect_variant(offset, addr, Int::U8, [None, Some(t)], what);
2397                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2398                }
2399
2400                TypeDefKind::Result(e) => {
2401                    self.deallocate_indirect_variant(
2402                        offset,
2403                        addr,
2404                        Int::U8,
2405                        [e.ok.as_ref(), e.err.as_ref()],
2406                        what,
2407                    );
2408                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2409                }
2410
2411                TypeDefKind::Enum(_) => {}
2412
2413                TypeDefKind::Future(_) => unreachable!(),
2414                TypeDefKind::Stream(_) => unreachable!(),
2415                TypeDefKind::Unknown => unreachable!(),
2416                TypeDefKind::FixedSizeList(..) => todo!(),
2417            },
2418        }
2419    }
2420
2421    fn deallocate_indirect_variant<'b>(
2422        &mut self,
2423        offset: ArchitectureSize,
2424        addr: B::Operand,
2425        tag: Int,
2426        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
2427        what: Deallocate,
2428    ) {
2429        self.stack.push(addr.clone());
2430        self.load_intrepr(offset, tag);
2431        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
2432        for ty in cases {
2433            self.push_block();
2434            if let Some(ty) = ty {
2435                self.deallocate_indirect(ty, addr.clone(), payload_offset, what);
2436            }
2437            self.finish_block(0);
2438        }
2439    }
2440
2441    fn deallocate_indirect_fields(
2442        &mut self,
2443        tys: &[Type],
2444        addr: B::Operand,
2445        offset: ArchitectureSize,
2446        what: Deallocate,
2447    ) {
2448        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys) {
2449            self.deallocate_indirect(ty, addr.clone(), offset + (field_offset), what);
2450        }
2451    }
2452}
2453
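/// Computes the `Bitcast` needed to reinterpret a value of core type `from`
/// as core type `to`, as used when unifying variant arms.
///
/// A minimal sketch of the expected behavior (derived from the match below):
///
/// ```ignore
/// assert!(matches!(cast(WasmType::F32, WasmType::F32), Bitcast::None));
/// assert!(matches!(cast(WasmType::F32, WasmType::I64), Bitcast::F32ToI64));
/// // Casts into `PointerOrI64` from e.g. `I32` are expressed as a two-step
/// // `Bitcast::Sequence` that routes through `I64`.
/// ```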
2454fn cast(from: WasmType, to: WasmType) -> Bitcast {
2455    use WasmType::*;
2456
2457    match (from, to) {
2458        (I32, I32)
2459        | (I64, I64)
2460        | (F32, F32)
2461        | (F64, F64)
2462        | (Pointer, Pointer)
2463        | (PointerOrI64, PointerOrI64)
2464        | (Length, Length) => Bitcast::None,
2465
2466        (I32, I64) => Bitcast::I32ToI64,
2467        (F32, I32) => Bitcast::F32ToI32,
2468        (F64, I64) => Bitcast::F64ToI64,
2469
2470        (I64, I32) => Bitcast::I64ToI32,
2471        (I32, F32) => Bitcast::I32ToF32,
2472        (I64, F64) => Bitcast::I64ToF64,
2473
2474        (F32, I64) => Bitcast::F32ToI64,
2475        (I64, F32) => Bitcast::I64ToF32,
2476
2477        (I64, PointerOrI64) => Bitcast::I64ToP64,
2478        (Pointer, PointerOrI64) => Bitcast::PToP64,
2479        (_, PointerOrI64) => {
2480            Bitcast::Sequence(Box::new([cast(from, I64), cast(I64, PointerOrI64)]))
2481        }
2482
2483        (PointerOrI64, I64) => Bitcast::P64ToI64,
2484        (PointerOrI64, Pointer) => Bitcast::P64ToP,
2485        (PointerOrI64, _) => Bitcast::Sequence(Box::new([cast(PointerOrI64, I64), cast(I64, to)])),
2486
2487        (I32, Pointer) => Bitcast::I32ToP,
2488        (Pointer, I32) => Bitcast::PToI32,
2489        (I32, Length) => Bitcast::I32ToL,
2490        (Length, I32) => Bitcast::LToI32,
2491        (I64, Length) => Bitcast::I64ToL,
2492        (Length, I64) => Bitcast::LToI64,
2493        (Pointer, Length) => Bitcast::PToL,
2494        (Length, Pointer) => Bitcast::LToP,
2495
2496        (F32, Pointer | Length) => Bitcast::Sequence(Box::new([cast(F32, I32), cast(I32, to)])),
2497        (Pointer | Length, F32) => Bitcast::Sequence(Box::new([cast(from, I32), cast(I32, F32)])),
2498
2499        (F32, F64)
2500        | (F64, F32)
2501        | (F64, I32)
2502        | (I32, F64)
2503        | (Pointer | Length, I64 | F64)
2504        | (I64 | F64, Pointer | Length) => {
2505            unreachable!("Don't know how to bitcast from {:?} to {:?}", from, to);
2506        }
2507    }
2508}
2509
2510/// Flattens a type into its constituent core wasm types, returning `None` if
2511/// the flat representation needs more than `max_params` core types.
2512/// It is sometimes necessary to restrict the maximum number of flat parameters
2513/// dynamically, for example during an async guest import call (flat params are limited to 4).
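///
/// Illustrative examples: a WIT `string` flattens to a pointer/length pair,
/// and `record { x: u32, y: f64 }` flattens to `[I32, F64]`.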
2514fn flat_types(resolve: &Resolve, ty: &Type, max_params: Option<usize>) -> Option<Vec<WasmType>> {
2515    let max_params = max_params.unwrap_or(MAX_FLAT_PARAMS);
2516    let mut storage = iter::repeat_n(WasmType::I32, max_params).collect::<Vec<_>>();
2517    let mut flat = FlatTypes::new(storage.as_mut_slice());
2518    resolve.push_flat(ty, &mut flat).then_some(flat.to_vec())
2519}