wit_bindgen_core/
abi.rs

1use std::fmt;
2use std::iter;
3
4pub use wit_parser::abi::{AbiVariant, FlatTypes, WasmSignature, WasmType};
5use wit_parser::{
6    align_to_arch, Alignment, ArchitectureSize, ElementInfo, Enum, Flags, FlagsRepr, Function,
7    Handle, Int, Record, Resolve, Result_, SizeAlign, Tuple, Type, TypeDefKind, TypeId, Variant,
8};
9
10// Helper macro for defining instructions without having to have tons of
11// exhaustive `match` statements to update
// Accepted grammar: an enum definition where every variant is annotated with
// `: [pops] => [pushes]` — expressions giving how many operands the
// instruction consumes from and pushes onto the value stack. Field names are
// in scope inside those expressions (e.g. `casts.len()`), which is why the
// generated accessors destructure every field in their `match` arms.
macro_rules! def_instruction {
    (
        $( #[$enum_attr:meta] )*
        pub enum $name:ident<'a> {
            $(
                $( #[$attr:meta] )*
                $variant:ident $( {
                    $($field:ident : $field_ty:ty $(,)* )*
                } )?
                    :
                [$num_popped:expr] => [$num_pushed:expr],
            )*
        }
    ) => {
        // Emit the enum itself, minus the `: [..] => [..]` annotations.
        $( #[$enum_attr] )*
        pub enum $name<'a> {
            $(
                $( #[$attr] )*
                $variant $( {
                    $(
                        $field : $field_ty,
                    )*
                } )? ,
            )*
        }

        impl $name<'_> {
            /// How many operands does this instruction pop from the stack?
            #[allow(unused_variables)]
            pub fn operands_len(&self) -> usize {
                match self {
                    $(
                        Self::$variant $( {
                            $(
                                $field,
                            )*
                        } )? => $num_popped,
                    )*
                }
            }

            /// How many results does this instruction push onto the stack?
            #[allow(unused_variables)]
            pub fn results_len(&self) -> usize {
                match self {
                    $(
                        Self::$variant $( {
                            $(
                                $field,
                            )*
                        } )? => $num_pushed,
                    )*
                }
            }
        }
    };
}
69
70def_instruction! {
71    #[derive(Debug)]
72    pub enum Instruction<'a> {
73        /// Acquires the specified parameter and places it on the stack.
74        /// Depending on the context this may refer to wasm parameters or
75        /// interface types parameters.
76        GetArg { nth: usize } : [0] => [1],
77
78        // Integer const/manipulation instructions
79
80        /// Pushes the constant `val` onto the stack.
81        I32Const { val: i32 } : [0] => [1],
82        /// Casts the top N items on the stack using the `Bitcast` enum
83        /// provided. Consumes the same number of operands that this produces.
84        Bitcasts { casts: &'a [Bitcast] } : [casts.len()] => [casts.len()],
85        /// Pushes a number of constant zeros for each wasm type on the stack.
86        ConstZero { tys: &'a [WasmType] } : [0] => [tys.len()],
87
88        // Memory load/store instructions
89
90        /// Pops a pointer from the stack and loads a little-endian `i32` from
91        /// it, using the specified constant offset.
92        I32Load { offset: ArchitectureSize } : [1] => [1],
93        /// Pops a pointer from the stack and loads a little-endian `i8` from
94        /// it, using the specified constant offset. The value loaded is the
95        /// zero-extended to 32-bits
96        I32Load8U { offset: ArchitectureSize } : [1] => [1],
97        /// Pops a pointer from the stack and loads a little-endian `i8` from
98        /// it, using the specified constant offset. The value loaded is the
99        /// sign-extended to 32-bits
100        I32Load8S { offset: ArchitectureSize } : [1] => [1],
101        /// Pops a pointer from the stack and loads a little-endian `i16` from
102        /// it, using the specified constant offset. The value loaded is the
103        /// zero-extended to 32-bits
104        I32Load16U { offset: ArchitectureSize } : [1] => [1],
105        /// Pops a pointer from the stack and loads a little-endian `i16` from
106        /// it, using the specified constant offset. The value loaded is the
107        /// sign-extended to 32-bits
108        I32Load16S { offset: ArchitectureSize } : [1] => [1],
109        /// Pops a pointer from the stack and loads a little-endian `i64` from
110        /// it, using the specified constant offset.
111        I64Load { offset: ArchitectureSize } : [1] => [1],
112        /// Pops a pointer from the stack and loads a little-endian `f32` from
113        /// it, using the specified constant offset.
114        F32Load { offset: ArchitectureSize } : [1] => [1],
115        /// Pops a pointer from the stack and loads a little-endian `f64` from
116        /// it, using the specified constant offset.
117        F64Load { offset: ArchitectureSize } : [1] => [1],
118
119        /// Like `I32Load` or `I64Load`, but for loading pointer values.
120        PointerLoad { offset: ArchitectureSize } : [1] => [1],
121        /// Like `I32Load` or `I64Load`, but for loading array length values.
122        LengthLoad { offset: ArchitectureSize } : [1] => [1],
123
124        /// Pops a pointer from the stack and then an `i32` value.
125        /// Stores the value in little-endian at the pointer specified plus the
126        /// constant `offset`.
127        I32Store { offset: ArchitectureSize } : [2] => [0],
128        /// Pops a pointer from the stack and then an `i32` value.
129        /// Stores the low 8 bits of the value in little-endian at the pointer
130        /// specified plus the constant `offset`.
131        I32Store8 { offset: ArchitectureSize } : [2] => [0],
132        /// Pops a pointer from the stack and then an `i32` value.
133        /// Stores the low 16 bits of the value in little-endian at the pointer
134        /// specified plus the constant `offset`.
135        I32Store16 { offset: ArchitectureSize } : [2] => [0],
136        /// Pops a pointer from the stack and then an `i64` value.
137        /// Stores the value in little-endian at the pointer specified plus the
138        /// constant `offset`.
139        I64Store { offset: ArchitectureSize } : [2] => [0],
140        /// Pops a pointer from the stack and then an `f32` value.
141        /// Stores the value in little-endian at the pointer specified plus the
142        /// constant `offset`.
143        F32Store { offset: ArchitectureSize } : [2] => [0],
144        /// Pops a pointer from the stack and then an `f64` value.
145        /// Stores the value in little-endian at the pointer specified plus the
146        /// constant `offset`.
147        F64Store { offset: ArchitectureSize } : [2] => [0],
148
149        /// Like `I32Store` or `I64Store`, but for storing pointer values.
150        PointerStore { offset: ArchitectureSize } : [2] => [0],
151        /// Like `I32Store` or `I64Store`, but for storing array length values.
152        LengthStore { offset: ArchitectureSize } : [2] => [0],
153
154        // Scalar lifting/lowering
155
156        /// Converts an interface type `char` value to a 32-bit integer
157        /// representing the unicode scalar value.
158        I32FromChar : [1] => [1],
159        /// Converts an interface type `u64` value to a wasm `i64`.
160        I64FromU64 : [1] => [1],
161        /// Converts an interface type `s64` value to a wasm `i64`.
162        I64FromS64 : [1] => [1],
163        /// Converts an interface type `u32` value to a wasm `i32`.
164        I32FromU32 : [1] => [1],
165        /// Converts an interface type `s32` value to a wasm `i32`.
166        I32FromS32 : [1] => [1],
167        /// Converts an interface type `u16` value to a wasm `i32`.
168        I32FromU16 : [1] => [1],
169        /// Converts an interface type `s16` value to a wasm `i32`.
170        I32FromS16 : [1] => [1],
171        /// Converts an interface type `u8` value to a wasm `i32`.
172        I32FromU8 : [1] => [1],
173        /// Converts an interface type `s8` value to a wasm `i32`.
174        I32FromS8 : [1] => [1],
175        /// Conversion an interface type `f32` value to a wasm `f32`.
176        ///
177        /// This may be a noop for some implementations, but it's here in case the
178        /// native language representation of `f32` is different than the wasm
179        /// representation of `f32`.
180        CoreF32FromF32 : [1] => [1],
181        /// Conversion an interface type `f64` value to a wasm `f64`.
182        ///
183        /// This may be a noop for some implementations, but it's here in case the
184        /// native language representation of `f64` is different than the wasm
185        /// representation of `f64`.
186        CoreF64FromF64 : [1] => [1],
187
188        /// Converts a native wasm `i32` to an interface type `s8`.
189        ///
190        /// This will truncate the upper bits of the `i32`.
191        S8FromI32 : [1] => [1],
192        /// Converts a native wasm `i32` to an interface type `u8`.
193        ///
194        /// This will truncate the upper bits of the `i32`.
195        U8FromI32 : [1] => [1],
196        /// Converts a native wasm `i32` to an interface type `s16`.
197        ///
198        /// This will truncate the upper bits of the `i32`.
199        S16FromI32 : [1] => [1],
200        /// Converts a native wasm `i32` to an interface type `u16`.
201        ///
202        /// This will truncate the upper bits of the `i32`.
203        U16FromI32 : [1] => [1],
204        /// Converts a native wasm `i32` to an interface type `s32`.
205        S32FromI32 : [1] => [1],
206        /// Converts a native wasm `i32` to an interface type `u32`.
207        U32FromI32 : [1] => [1],
208        /// Converts a native wasm `i64` to an interface type `s64`.
209        S64FromI64 : [1] => [1],
210        /// Converts a native wasm `i64` to an interface type `u64`.
211        U64FromI64 : [1] => [1],
212        /// Converts a native wasm `i32` to an interface type `char`.
213        ///
214        /// It's safe to assume that the `i32` is indeed a valid unicode code point.
215        CharFromI32 : [1] => [1],
216        /// Converts a native wasm `f32` to an interface type `f32`.
217        F32FromCoreF32 : [1] => [1],
218        /// Converts a native wasm `f64` to an interface type `f64`.
219        F64FromCoreF64 : [1] => [1],
220
221        /// Creates a `bool` from an `i32` input, trapping if the `i32` isn't
222        /// zero or one.
223        BoolFromI32 : [1] => [1],
224        /// Creates an `i32` from a `bool` input, must return 0 or 1.
225        I32FromBool : [1] => [1],
226
227        // lists
228
229        /// Lowers a list where the element's layout in the native language is
230        /// expected to match the canonical ABI definition of interface types.
231        ///
232        /// Pops a list value from the stack and pushes the pointer/length onto
233        /// the stack. If `realloc` is set to `Some` then this is expected to
234        /// *consume* the list which means that the data needs to be copied. An
235        /// allocation/copy is expected when:
236        ///
237        /// * A host is calling a wasm export with a list (it needs to copy the
238        ///   list in to the callee's module, allocating space with `realloc`)
239        /// * A wasm export is returning a list (it's expected to use `realloc`
240        ///   to give ownership of the list to the caller.
241        /// * A host is returning a list in a import definition, meaning that
242        ///   space needs to be allocated in the caller with `realloc`).
243        ///
244        /// A copy does not happen (e.g. `realloc` is `None`) when:
245        ///
246        /// * A wasm module calls an import with the list. In this situation
247        ///   it's expected the caller will know how to access this module's
248        ///   memory (e.g. the host has raw access or wasm-to-wasm communication
249        ///   would copy the list).
250        ///
251        /// If `realloc` is `Some` then the adapter is not responsible for
252        /// cleaning up this list because the other end is receiving the
253        /// allocation. If `realloc` is `None` then the adapter is responsible
254        /// for cleaning up any temporary allocation it created, if any.
255        ListCanonLower {
256            element: &'a Type,
257            realloc: Option<&'a str>,
258        } : [1] => [2],
259
260        /// Same as `ListCanonLower`, but used for strings
261        StringLower {
262            realloc: Option<&'a str>,
263        } : [1] => [2],
264
265        /// Lowers a list where the element's layout in the native language is
266        /// not expected to match the canonical ABI definition of interface
267        /// types.
268        ///
269        /// Pops a list value from the stack and pushes the pointer/length onto
270        /// the stack. This operation also pops a block from the block stack
271        /// which is used as the iteration body of writing each element of the
272        /// list consumed.
273        ///
274        /// The `realloc` field here behaves the same way as `ListCanonLower`.
275        /// It's only set to `None` when a wasm module calls a declared import.
276        /// Otherwise lowering in other contexts requires allocating memory for
277        /// the receiver to own.
278        ListLower {
279            element: &'a Type,
280            realloc: Option<&'a str>,
281        } : [1] => [2],
282
283        /// Lifts a list which has a canonical representation into an interface
284        /// types value.
285        ///
286        /// The term "canonical" representation here means that the
287        /// representation of the interface types value in the native language
288        /// exactly matches the canonical ABI definition of the type.
289        ///
290        /// This will consume two `i32` values from the stack, a pointer and a
291        /// length, and then produces an interface value list.
292        ListCanonLift {
293            element: &'a Type,
294            ty: TypeId,
295        } : [2] => [1],
296
297        /// Same as `ListCanonLift`, but used for strings
298        StringLift : [2] => [1],
299
300        /// Lifts a list which into an interface types value.
301        ///
302        /// This will consume two `i32` values from the stack, a pointer and a
303        /// length, and then produces an interface value list.
304        ///
305        /// This will also pop a block from the block stack which is how to
306        /// read each individual element from the list.
307        ListLift {
308            element: &'a Type,
309            ty: TypeId,
310        } : [2] => [1],
311
312        /// Pushes an operand onto the stack representing the list item from
313        /// each iteration of the list.
314        ///
315        /// This is only used inside of blocks related to lowering lists.
316        IterElem { element: &'a Type } : [0] => [1],
317
318        /// Pushes an operand onto the stack representing the base pointer of
319        /// the next element in a list.
320        ///
321        /// This is used for both lifting and lowering lists.
322        IterBasePointer : [0] => [1],
323
324        // records and tuples
325
326        /// Pops a record value off the stack, decomposes the record to all of
327        /// its fields, and then pushes the fields onto the stack.
328        RecordLower {
329            record: &'a Record,
330            name: &'a str,
331            ty: TypeId,
332        } : [1] => [record.fields.len()],
333
334        /// Pops all fields for a record off the stack and then composes them
335        /// into a record.
336        RecordLift {
337            record: &'a Record,
338            name: &'a str,
339            ty: TypeId,
340        } : [record.fields.len()] => [1],
341
342        /// Create an `i32` from a handle.
343        HandleLower {
344            handle: &'a Handle,
345            name: &'a str,
346            ty: TypeId,
347        } : [1] => [1],
348
349        /// Create a handle from an `i32`.
350        HandleLift {
351            handle: &'a Handle,
352            name: &'a str,
353            ty: TypeId,
354        } : [1] => [1],
355
356        /// Create an `i32` from a future.
357        FutureLower {
358            payload: &'a Option<Type>,
359            ty: TypeId,
360        } : [1] => [1],
361
362        /// Create a future from an `i32`.
363        FutureLift {
364            payload: &'a Option<Type>,
365            ty: TypeId,
366        } : [1] => [1],
367
368        /// Create an `i32` from a stream.
369        StreamLower {
370            payload: &'a Option<Type>,
371            ty: TypeId,
372        } : [1] => [1],
373
374        /// Create a stream from an `i32`.
375        StreamLift {
376            payload: &'a Option<Type>,
377            ty: TypeId,
378        } : [1] => [1],
379
380        /// Create an `i32` from an error-context.
381        ErrorContextLower : [1] => [1],
382
383        /// Create a error-context from an `i32`.
384        ErrorContextLift : [1] => [1],
385
386        /// Pops a tuple value off the stack, decomposes the tuple to all of
387        /// its fields, and then pushes the fields onto the stack.
388        TupleLower {
389            tuple: &'a Tuple,
390            ty: TypeId,
391        } : [1] => [tuple.types.len()],
392
393        /// Pops all fields for a tuple off the stack and then composes them
394        /// into a tuple.
395        TupleLift {
396            tuple: &'a Tuple,
397            ty: TypeId,
398        } : [tuple.types.len()] => [1],
399
400        /// Converts a language-specific record-of-bools to a list of `i32`.
401        FlagsLower {
402            flags: &'a Flags,
403            name: &'a str,
404            ty: TypeId,
405        } : [1] => [flags.repr().count()],
406        /// Converts a list of native wasm `i32` to a language-specific
407        /// record-of-bools.
408        FlagsLift {
409            flags: &'a Flags,
410            name: &'a str,
411            ty: TypeId,
412        } : [flags.repr().count()] => [1],
413
414        // variants
415
416        /// This is a special instruction used for `VariantLower`
417        /// instruction to determine the name of the payload, if present, to use
418        /// within each block.
419        ///
420        /// Each sub-block will have this be the first instruction, and if it
421        /// lowers a payload it will expect something bound to this name.
422        VariantPayloadName : [0] => [1],
423
424        /// Pops a variant off the stack as well as `ty.cases.len()` blocks
425        /// from the code generator. Uses each of those blocks and the value
426        /// from the stack to produce `nresults` of items.
427        VariantLower {
428            variant: &'a Variant,
429            name: &'a str,
430            ty: TypeId,
431            results: &'a [WasmType],
432        } : [1] => [results.len()],
433
434        /// Pops an `i32` off the stack as well as `ty.cases.len()` blocks
435        /// from the code generator. Uses each of those blocks and the value
436        /// from the stack to produce a final variant.
437        VariantLift {
438            variant: &'a Variant,
439            name: &'a str,
440            ty: TypeId,
441        } : [1] => [1],
442
443        /// Pops an enum off the stack and pushes the `i32` representation.
444        EnumLower {
445            enum_: &'a Enum,
446            name: &'a str,
447            ty: TypeId,
448        } : [1] => [1],
449
450        /// Pops an `i32` off the stack and lifts it into the `enum` specified.
451        EnumLift {
452            enum_: &'a Enum,
453            name: &'a str,
454            ty: TypeId,
455        } : [1] => [1],
456
457        /// Specialization of `VariantLower` for specifically `option<T>` types,
458        /// otherwise behaves the same as `VariantLower` (e.g. two blocks for
459        /// the two cases.
460        OptionLower {
461            payload: &'a Type,
462            ty: TypeId,
463            results: &'a [WasmType],
464        } : [1] => [results.len()],
465
466        /// Specialization of `VariantLift` for specifically the `option<T>`
467        /// type. Otherwise behaves the same as the `VariantLift` instruction
468        /// with two blocks for the lift.
469        OptionLift {
470            payload: &'a Type,
471            ty: TypeId,
472        } : [1] => [1],
473
474        /// Specialization of `VariantLower` for specifically `result<T, E>`
475        /// types, otherwise behaves the same as `VariantLower` (e.g. two blocks
476        /// for the two cases.
477        ResultLower {
478            result: &'a Result_
479            ty: TypeId,
480            results: &'a [WasmType],
481        } : [1] => [results.len()],
482
483        /// Specialization of `VariantLift` for specifically the `result<T,
484        /// E>` type. Otherwise behaves the same as the `VariantLift`
485        /// instruction with two blocks for the lift.
486        ResultLift {
487            result: &'a Result_,
488            ty: TypeId,
489        } : [1] => [1],
490
491        // calling/control flow
492
493        /// Represents a call to a raw WebAssembly API. The module/name are
494        /// provided inline as well as the types if necessary.
495        CallWasm {
496            name: &'a str,
497            sig: &'a WasmSignature,
498        } : [sig.params.len()] => [sig.results.len()],
499
500        /// Same as `CallWasm`, except the dual where an interface is being
501        /// called rather than a raw wasm function.
502        ///
503        /// Note that this will be used for async functions, and `async_`
504        /// indicates whether the function should be invoked in an async
505        /// fashion.
506        CallInterface {
507            func: &'a Function,
508            async_: bool,
509        } : [func.params.len()] => [usize::from(func.result.is_some())],
510
511        /// Returns `amt` values on the stack. This is always the last
512        /// instruction.
513        Return { amt: usize, func: &'a Function } : [*amt] => [0],
514
515        /// Calls the `realloc` function specified in a malloc-like fashion
516        /// allocating `size` bytes with alignment `align`.
517        ///
518        /// Pushes the returned pointer onto the stack.
519        Malloc {
520            realloc: &'static str,
521            size: ArchitectureSize,
522            align: Alignment,
523        } : [0] => [1],
524
525        /// Used exclusively for guest-code generation this indicates that
526        /// the standard memory deallocation function needs to be invoked with
527        /// the specified parameters.
528        ///
529        /// This will pop a pointer from the stack and push nothing.
530        GuestDeallocate {
531            size: ArchitectureSize,
532            align: Alignment,
533        } : [1] => [0],
534
535        /// Used exclusively for guest-code generation this indicates that
536        /// a string is being deallocated. The ptr/length are on the stack and
537        /// are poppped off and used to deallocate the string.
538        GuestDeallocateString : [2] => [0],
539
540        /// Used exclusively for guest-code generation this indicates that
541        /// a list is being deallocated. The ptr/length are on the stack and
542        /// are poppped off and used to deallocate the list.
543        ///
544        /// This variant also pops a block off the block stack to be used as the
545        /// body of the deallocation loop.
546        GuestDeallocateList {
547            element: &'a Type,
548        } : [2] => [0],
549
550        /// Used exclusively for guest-code generation this indicates that
551        /// a variant is being deallocated. The integer discriminant is popped
552        /// off the stack as well as `blocks` number of blocks popped from the
553        /// blocks stack. The variant is used to select, at runtime, which of
554        /// the blocks is executed to deallocate the variant.
555        GuestDeallocateVariant {
556            blocks: usize,
557        } : [1] => [0],
558
559        /// Deallocates the language-specific handle representation on the top
560        /// of the stack. Used for async imports.
561        DropHandle { ty: &'a Type } : [1] => [0],
562
563        /// Call `task.return` for an async-lifted export.
564        ///
565        /// This will call core wasm import `name` which will be mapped to
566        /// `task.return` later on. The function given has `params` as its
567        /// parameters and it will return no results. This is used to pass the
568        /// lowered representation of a function's results to `task.return`.
569        AsyncTaskReturn { name: &'a str, params: &'a [WasmType] } : [params.len()] => [0],
570
571        /// Force the evaluation of the specified number of expressions and push
572        /// the results to the stack.
573        ///
574        /// This is useful prior to disposing of temporary variables and/or
575        /// allocations which are referenced by one or more not-yet-evaluated
576        /// expressions.
577        Flush { amt: usize } : [*amt] => [*amt],
578    }
579}
580
/// An individual conversion between two core wasm value types.
///
/// These are consumed by the `Bitcasts` instruction, which applies one cast
/// per stack operand being converted.
#[derive(Debug, PartialEq)]
pub enum Bitcast {
    // Upcasts
    F32ToI32,
    F64ToI64,
    I32ToI64,
    F32ToI64,

    // Downcasts
    I32ToF32,
    I64ToF64,
    I64ToI32,
    I64ToF32,

    // PointerOrI64 conversions. These preserve provenance when the source
    // or destination is a pointer value.
    //
    // These are used when pointer values are being stored in
    // (ToP64) and loaded out of (P64To) PointerOrI64 values, so they
    // always have to preserve provenance when the value being loaded or
    // stored is a pointer.
    P64ToI64,
    I64ToP64,
    P64ToP,
    PToP64,

    // Pointer<->number conversions. These do not preserve provenance.
    //
    // These are used when integer or floating-point values are being stored in
    // (I32ToP/etc.) and loaded out of (PToI32/etc.) pointer values, so they
    // never have any provenance to preserve.
    I32ToP,
    PToI32,
    PToL,
    LToP,

    // Number<->Number conversions.
    I32ToL,
    LToI32,
    I64ToL,
    LToI64,

    // Multiple conversions in sequence.
    Sequence(Box<[Bitcast; 2]>),

    // No conversion necessary.
    None,
}
628
/// Whether the glue code surrounding a call is lifting arguments and lowering
/// results or vice versa.
// `Debug` added for parity with the other public types in this module
// (e.g. `Bitcast`); purely additive and backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LiftLower {
    /// When the glue code lifts arguments and lowers results.
    ///
    /// ```text
    /// Wasm --lift-args--> SourceLanguage; call; SourceLanguage --lower-results--> Wasm
    /// ```
    LiftArgsLowerResults,
    /// When the glue code lowers arguments and lifts results.
    ///
    /// ```text
    /// SourceLanguage --lower-args--> Wasm; call; Wasm --lift-results--> SourceLanguage
    /// ```
    LowerArgsLiftResults,
}
646
/// Trait for language implementors to use to generate glue code between native
/// WebAssembly signatures and interface types signatures.
///
/// This is used as an implementation detail in interpreting the ABI between
/// interface types and wasm types. Eventually this will be driven by interface
/// types adapters themselves, but for now the ABI of a function dictates what
/// instructions are fed in.
///
/// Types implementing `Bindgen` are incrementally fed `Instruction` values to
/// generate code for. Instructions operate like a stack machine where each
/// instruction has a list of inputs and a list of outputs (provided by the
/// `emit` function).
pub trait Bindgen {
    /// The intermediate type for fragments of code for this type.
    ///
    /// For most languages `String` is a suitable intermediate type.
    type Operand: Clone + fmt::Debug;

    /// Emit code to implement the given instruction.
    ///
    /// Each operand is given in `operands` and can be popped off if ownership
    /// is required. It's guaranteed that `operands` has the appropriate length
    /// for the `inst` given, as specified with [`Instruction`].
    ///
    /// Each result variable should be pushed onto `results`. This function must
    /// push the appropriate number of results or binding generation will panic.
    fn emit(
        &mut self,
        resolve: &Resolve,
        inst: &Instruction<'_>,
        operands: &mut Vec<Self::Operand>,
        results: &mut Vec<Self::Operand>,
    );

    /// Gets an operand reference to the return pointer area.
    ///
    /// The provided size and alignment is for the function's return type.
    fn return_pointer(&mut self, size: ArchitectureSize, align: Alignment) -> Self::Operand;

    /// Enters a new block of code to generate code for.
    ///
    /// This is currently exclusively used for constructing variants. When a
    /// variant is constructed a block here will be pushed for each case of a
    /// variant, generating the code necessary to translate a variant case.
    ///
    /// Blocks are completed with `finish_block` below. It's expected that `emit`
    /// will always push code (if necessary) into the "current block", which is
    /// updated by calling this method and `finish_block` below.
    fn push_block(&mut self);

    /// Indicates to the code generator that a block is completed, and the
    /// `operand` vector holds the resulting value(s) of the block.
    ///
    /// This method will be used to compute the value of each arm of lifting a
    /// variant. The `operand` vector will be empty if the variant case didn't
    /// actually have any type associated with it. Otherwise it holds the
    /// value(s) remaining on the stack representing the value associated with
    /// a variant's `case`.
    ///
    /// It's expected that this will resume code generation in the previous
    /// block before `push_block` was called. This must also save the results
    /// of the current block internally for instructions like `ResultLift` to
    /// use later.
    fn finish_block(&mut self, operand: &mut Vec<Self::Operand>);

    /// Returns size information that was previously calculated for all types.
    fn sizes(&self) -> &SizeAlign;

    /// Returns whether or not the specified element type is represented in a
    /// "canonical" form for lists. This dictates whether the `ListCanonLower`
    /// and `ListCanonLift` instructions are used or not.
    fn is_list_canonical(&self, resolve: &Resolve, element: &Type) -> bool;
}
720
/// Generates an abstract sequence of instructions which represents this
/// function being adapted as an imported function.
///
/// The instructions here, when executed, will emulate a language with
/// interface types calling the concrete wasm implementation. The parameters
/// for the returned instruction sequence are the language's own
/// interface-types parameters. One instruction in the instruction stream
/// will be a `Call` which represents calling the actual raw wasm function
/// signature.
///
/// This function is useful, for example, if you're building a language
/// generator for WASI bindings. This will document how to translate
/// language-specific values into the wasm types to call a WASI function,
/// and it will also automatically convert the results of the WASI function
/// back to a language-specific value.
///
/// * `variant` - which ABI (import/export, sync/async flavors) is targeted.
/// * `lift_lower` - the direction of the adaptation: lower arguments and
///   lift results, or lift arguments and lower results.
/// * `bindgen` - the language-specific generator that receives each
///   `Instruction`.
/// * `async_` - whether the function is being adapted as an async call.
pub fn call(
    resolve: &Resolve,
    variant: AbiVariant,
    lift_lower: LiftLower,
    func: &Function,
    bindgen: &mut impl Bindgen,
    async_: bool,
) {
    Generator::new(resolve, bindgen).call(func, variant, lift_lower, async_);
}
746
747pub fn lower_to_memory<B: Bindgen>(
748    resolve: &Resolve,
749    bindgen: &mut B,
750    address: B::Operand,
751    value: B::Operand,
752    ty: &Type,
753) {
754    let mut generator = Generator::new(resolve, bindgen);
755    // TODO: make this configurable? Right now this function is only called for
756    // future/stream callbacks so it's appropriate to skip realloc here as it's
757    // all "lower for wasm import", but this might get reused for something else
758    // in the future.
759    generator.realloc = Some(Realloc::Export("cabi_realloc"));
760    generator.stack.push(value);
761    generator.write_to_memory(ty, address, Default::default());
762}
763
764pub fn lower_flat<B: Bindgen>(
765    resolve: &Resolve,
766    bindgen: &mut B,
767    value: B::Operand,
768    ty: &Type,
769) -> Vec<B::Operand> {
770    let mut generator = Generator::new(resolve, bindgen);
771    generator.stack.push(value);
772    generator.realloc = Some(Realloc::Export("cabi_realloc"));
773    generator.lower(ty);
774    generator.stack
775}
776
777pub fn lift_from_memory<B: Bindgen>(
778    resolve: &Resolve,
779    bindgen: &mut B,
780    address: B::Operand,
781    ty: &Type,
782) -> B::Operand {
783    let mut generator = Generator::new(resolve, bindgen);
784    generator.read_from_memory(ty, address, Default::default());
785    generator.stack.pop().unwrap()
786}
787
/// Used in a similar manner as the `Interface::call` function except is
/// used to generate the `post-return` callback for `func`.
///
/// This is only intended to be used in guest generators for exported
/// functions and will primarily generate `GuestDeallocate*` instructions,
/// plus others used as input to those instructions.
///
/// Callers should first consult `guest_export_needs_post_return` to decide
/// whether a post-return function is needed at all.
pub fn post_return(resolve: &Resolve, func: &Function, bindgen: &mut impl Bindgen) {
    Generator::new(resolve, bindgen).post_return(func);
}
797
798/// Returns whether the `Function` specified needs a post-return function to
799/// be generated in guest code.
800///
801/// This is used when the return value contains a memory allocation such as
802/// a list or a string primarily.
803pub fn guest_export_needs_post_return(resolve: &Resolve, func: &Function) -> bool {
804    func.result
805        .map(|t| needs_deallocate(resolve, &t, Deallocate::Lists))
806        .unwrap_or(false)
807}
808
809fn needs_deallocate(resolve: &Resolve, ty: &Type, what: Deallocate) -> bool {
810    match ty {
811        Type::String => true,
812        Type::ErrorContext => true,
813        Type::Id(id) => match &resolve.types[*id].kind {
814            TypeDefKind::List(_) => true,
815            TypeDefKind::Type(t) => needs_deallocate(resolve, t, what),
816            TypeDefKind::Handle(Handle::Own(_)) => what.handles(),
817            TypeDefKind::Handle(Handle::Borrow(_)) => false,
818            TypeDefKind::Resource => false,
819            TypeDefKind::Record(r) => r
820                .fields
821                .iter()
822                .any(|f| needs_deallocate(resolve, &f.ty, what)),
823            TypeDefKind::Tuple(t) => t.types.iter().any(|t| needs_deallocate(resolve, t, what)),
824            TypeDefKind::Variant(t) => t
825                .cases
826                .iter()
827                .filter_map(|t| t.ty.as_ref())
828                .any(|t| needs_deallocate(resolve, t, what)),
829            TypeDefKind::Option(t) => needs_deallocate(resolve, t, what),
830            TypeDefKind::Result(t) => [&t.ok, &t.err]
831                .iter()
832                .filter_map(|t| t.as_ref())
833                .any(|t| needs_deallocate(resolve, t, what)),
834            TypeDefKind::Flags(_) | TypeDefKind::Enum(_) => false,
835            TypeDefKind::Future(_) | TypeDefKind::Stream(_) => what.handles(),
836            TypeDefKind::Unknown => unreachable!(),
837            TypeDefKind::FixedSizeList(..) => todo!(),
838        },
839
840        Type::Bool
841        | Type::U8
842        | Type::S8
843        | Type::U16
844        | Type::S16
845        | Type::U32
846        | Type::S32
847        | Type::U64
848        | Type::S64
849        | Type::F32
850        | Type::F64
851        | Type::Char => false,
852    }
853}
854
855/// Generate instructions in `bindgen` to deallocate all lists in `ptr` where
856/// that's a pointer to a sequence of `types` stored in linear memory.
857pub fn deallocate_lists_in_types<B: Bindgen>(
858    resolve: &Resolve,
859    types: &[Type],
860    operands: &[B::Operand],
861    indirect: bool,
862    bindgen: &mut B,
863) {
864    Generator::new(resolve, bindgen).deallocate_in_types(
865        types,
866        operands,
867        indirect,
868        Deallocate::Lists,
869    );
870}
871
872/// Generate instructions in `bindgen` to deallocate all lists in `ptr` where
873/// that's a pointer to a sequence of `types` stored in linear memory.
874pub fn deallocate_lists_and_own_in_types<B: Bindgen>(
875    resolve: &Resolve,
876    types: &[Type],
877    operands: &[B::Operand],
878    indirect: bool,
879    bindgen: &mut B,
880) {
881    Generator::new(resolve, bindgen).deallocate_in_types(
882        types,
883        operands,
884        indirect,
885        Deallocate::ListsAndOwn,
886    );
887}
888
/// How guest memory should be allocated while lowering values that need
/// allocations (e.g. strings and lists).
#[derive(Copy, Clone)]
pub enum Realloc {
    /// No realloc function is used; memory ownership is not passed.
    None,
    /// Allocate by calling the named exported realloc function
    /// (e.g. `"cabi_realloc"`).
    Export(&'static str),
}
894
/// What to deallocate in various `deallocate_*` methods.
#[derive(Copy, Clone)]
enum Deallocate {
    /// Only deallocate memory allocations (see `needs_deallocate`, which
    /// treats lists and strings as allocation-carrying).
    Lists,
    /// Deallocate lists and owned resources such as `own<T>` and
    /// futures/streams.
    ListsAndOwn,
}
904
905impl Deallocate {
906    fn handles(&self) -> bool {
907        match self {
908            Deallocate::Lists => false,
909            Deallocate::ListsAndOwn => true,
910        }
911    }
912}
913
/// Internal driver that walks types/functions and feeds `Instruction`s to a
/// `Bindgen` implementation while maintaining an abstract value stack.
struct Generator<'a, B: Bindgen> {
    // Language-specific generator receiving the emitted instructions.
    bindgen: &'a mut B,
    // Type information for everything referenced by the function/type.
    resolve: &'a Resolve,
    // Scratch buffer of operands passed to `Bindgen::emit` (see `emit`).
    operands: Vec<B::Operand>,
    // Scratch buffer `Bindgen::emit` fills with each instruction's results.
    results: Vec<B::Operand>,
    // Abstract value stack that instructions pop from / push onto; asserted
    // empty at the end of `call`.
    stack: Vec<B::Operand>,
    // Return pointer saved while lowering a guest import's arguments when
    // the signature uses one (see `call`).
    return_pointer: Option<B::Operand>,
    // Active realloc strategy during a lowering phase; `None` outside of
    // lowering (asserted in `call`).
    realloc: Option<Realloc>,
}
923
// Maximum number of flattened core-wasm parameters before arguments are
// passed indirectly through memory (see `max_flat_params` in `call`).
const MAX_FLAT_PARAMS: usize = 16;
// Smaller flattening limit applied to async guest imports (also used to
// decide between flat results and a pointer in `call`).
const MAX_FLAT_ASYNC_PARAMS: usize = 4;
926
927impl<'a, B: Bindgen> Generator<'a, B> {
928    fn new(resolve: &'a Resolve, bindgen: &'a mut B) -> Generator<'a, B> {
929        Generator {
930            resolve,
931            bindgen,
932            operands: Vec::new(),
933            results: Vec::new(),
934            stack: Vec::new(),
935            return_pointer: None,
936            realloc: None,
937        }
938    }
939
    /// Emits the full instruction stream adapting `func` between the
    /// interface-types level and the core-wasm level.
    ///
    /// `lift_lower` selects the direction: `LowerArgsLiftResults` lowers
    /// arguments, emits `CallWasm`, then lifts results; `LiftArgsLowerResults`
    /// lifts arguments, emits `CallInterface`, then lowers results.
    fn call(&mut self, func: &Function, variant: AbiVariant, lift_lower: LiftLower, async_: bool) {
        let sig = self.resolve.wasm_signature(variant, func);

        // Lowering parameters calling a wasm import _or_ returning a result
        // from an async-lifted wasm export means we don't need to pass
        // ownership, but we pass ownership in all other cases.
        let realloc = match (variant, lift_lower, async_) {
            (AbiVariant::GuestImport, LiftLower::LowerArgsLiftResults, _)
            | (
                AbiVariant::GuestExport
                | AbiVariant::GuestExportAsync
                | AbiVariant::GuestExportAsyncStackful,
                LiftLower::LiftArgsLowerResults,
                true,
            ) => Realloc::None,
            _ => Realloc::Export("cabi_realloc"),
        };
        // `realloc` must only be set for the duration of a lowering phase.
        assert!(self.realloc.is_none());

        match lift_lower {
            LiftLower::LowerArgsLiftResults => {
                self.realloc = Some(realloc);

                // Closure which lowers every parameter into memory at `ptr`
                // (respecting each type's alignment) and then leaves `ptr`
                // itself on the stack as the sole argument.
                let lower_to_memory = |self_: &mut Self, ptr: B::Operand| {
                    let mut offset = ArchitectureSize::default();
                    for (nth, (_, ty)) in func.params.iter().enumerate() {
                        self_.emit(&Instruction::GetArg { nth });
                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
                        self_.write_to_memory(ty, ptr.clone(), offset);
                        offset += self_.bindgen.sizes().size(ty);
                    }

                    self_.stack.push(ptr);
                };

                // Lower parameters
                if sig.indirect_params {
                    // If parameters are indirect, space is allocated for
                    // them and each argument is lowered individually into
                    // memory.
                    let ElementInfo { size, align } = self
                        .bindgen
                        .sizes()
                        .record(func.params.iter().map(|t| &t.1));

                    // Resolve the pointer to the indirectly stored parameters
                    let ptr = match variant {
                        // When a wasm module calls an import it will provide
                        // space that isn't explicitly deallocated.
                        AbiVariant::GuestImport => self.bindgen.return_pointer(size, align),

                        AbiVariant::GuestImportAsync => {
                            todo!("direct param lowering for async guest import not implemented")
                        }

                        // When calling a wasm module from the outside, though,
                        // malloc needs to be called.
                        AbiVariant::GuestExport => {
                            self.emit(&Instruction::Malloc {
                                realloc: "cabi_realloc",
                                size,
                                align,
                            });
                            self.stack.pop().unwrap()
                        }

                        AbiVariant::GuestExportAsync | AbiVariant::GuestExportAsyncStackful => {
                            todo!("direct param lowering for async not implemented")
                        }
                    };

                    // Lower the parameters to memory
                    lower_to_memory(self, ptr);
                } else {
                    // ... otherwise arguments are direct
                    // (there aren't too many), so we simply do a normal lower
                    // operation for them all.
                    for (nth, (_, ty)) in func.params.iter().enumerate() {
                        self.emit(&Instruction::GetArg { nth });
                        self.lower(ty);
                    }
                }
                self.realloc = None;

                // If necessary we may need to prepare a return pointer for this ABI.
                if variant == AbiVariant::GuestImport && sig.retptr {
                    let info = self.bindgen.sizes().params(&func.result);
                    let ptr = self.bindgen.return_pointer(info.size, info.align);
                    self.return_pointer = Some(ptr.clone());
                    self.stack.push(ptr);
                }

                // Call the Wasm function
                assert_eq!(self.stack.len(), sig.params.len());
                self.emit(&Instruction::CallWasm {
                    name: &func.name,
                    sig: &sig,
                });

                // Handle the result
                if sig.retptr {
                    // If there is a return pointer, we must get the pointer
                    // to where results were stored and read them out.

                    let ptr = match variant {
                        // imports into guests means it's a wasm module
                        // calling an imported function. We supplied the
                        // return pointer as the last argument (saved in
                        // `self.return_pointer`) so we use that to read
                        // the result of the function from memory.
                        AbiVariant::GuestImport => {
                            assert!(sig.results.is_empty());
                            self.return_pointer.take().unwrap()
                        }

                        // guest exports means that this is a host
                        // calling wasm so wasm returned a pointer to where
                        // the result is stored
                        AbiVariant::GuestExport => self.stack.pop().unwrap(),

                        AbiVariant::GuestImportAsync
                        | AbiVariant::GuestExportAsync
                        | AbiVariant::GuestExportAsyncStackful => {
                            unreachable!()
                        }
                    };

                    if let (AbiVariant::GuestExport, true) = (variant, async_) {
                        // If we're dealing with an async function, the result should not be read from memory
                        // immediately, as it's the async call result
                        //
                        // We can leave the result of the call (the indication of what to do as an async call)
                        // on the stack as a return
                        self.stack.push(ptr);
                    } else {
                        // If we're not dealing with an async call, the result must be in memory at this point and can be read out
                        self.read_results_from_memory(
                            &func.result,
                            ptr.clone(),
                            ArchitectureSize::default(),
                        );
                        self.emit(&Instruction::Flush {
                            amt: usize::from(func.result.is_some()),
                        });
                    }
                } else {
                    // With no return pointer in use we can simply lift the
                    // result(s) of the function from the result of the core
                    // wasm function.
                    if let Some(ty) = &func.result {
                        self.lift(ty)
                    }
                }

                // Emit the function return
                self.emit(&Instruction::Return {
                    func,
                    amt: usize::from(func.result.is_some()),
                });
            }

            LiftLower::LiftArgsLowerResults => {
                let max_flat_params = match (variant, async_) {
                    (AbiVariant::GuestImportAsync, _is_async @ true) => MAX_FLAT_ASYNC_PARAMS,
                    _ => MAX_FLAT_PARAMS,
                };

                // Read parameters from memory
                let read_from_memory = |self_: &mut Self| {
                    let mut offset = ArchitectureSize::default();
                    let ptr = self_
                        .stack
                        .pop()
                        .expect("empty stack during read param from memory");
                    for (_, ty) in func.params.iter() {
                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
                        self_.read_from_memory(ty, ptr.clone(), offset);
                        offset += self_.bindgen.sizes().size(ty);
                    }
                };

                // Resolve parameters
                if sig.indirect_params {
                    // If parameters were passed indirectly, arguments must be
                    // read in succession from memory, with the pointer to the arguments
                    // being the first argument to the function.
                    self.emit(&Instruction::GetArg { nth: 0 });
                    read_from_memory(self);
                } else {
                    // ... otherwise, if parameters were passed directly then we lift each
                    // argument in succession from the component wasm types that
                    // make-up the type.
                    let mut offset = 0;
                    for (param_name, ty) in func.params.iter() {
                        let Some(types) = flat_types(self.resolve, ty, Some(max_flat_params))
                        else {
                            panic!("failed to flatten types during direct parameter lifting ('{param_name}' in func '{}')", func.name);
                        };
                        for _ in 0..types.len() {
                            self.emit(&Instruction::GetArg { nth: offset });
                            offset += 1;
                        }
                        self.lift(ty);
                    }
                }

                // ... and that allows us to call the interface types function
                self.emit(&Instruction::CallInterface { func, async_ });

                // The return value of an async function is *not* the result of the function
                // itself or a pointer but rather a status code.
                //
                // Asynchronous functions will call `task.return` after the
                // interface function completes, so lowering is conditional
                // based on slightly different logic for the `task.return`
                // intrinsic.
                //
                // Note that in the async import case the code below deals with the CM function being lowered,
                // not the core function that is underneath that (i.e. func.result may be empty,
                // where the associated core function underneath must have a i32 status code result)
                let (lower_to_memory, async_flat_results) = match (async_, &func.result) {
                    // All async cases pass along the function results and flatten where necessary
                    (_is_async @ true, func_result) => {
                        let results = match &func_result {
                            Some(ty) => flat_types(self.resolve, ty, Some(max_flat_params)),
                            None => Some(Vec::new()),
                        };
                        (results.is_none(), Some(results))
                    }
                    // All other non-async cases
                    (_is_async @ false, _) => (sig.retptr, None),
                };

                // This was dynamically allocated by the caller (or async start
                // function) so after it's been read by the guest we need to
                // deallocate it.
                if let AbiVariant::GuestExport
                | AbiVariant::GuestExportAsync
                | AbiVariant::GuestExportAsyncStackful = variant
                {
                    if sig.indirect_params && !async_ {
                        let ElementInfo { size, align } = self
                            .bindgen
                            .sizes()
                            .record(func.params.iter().map(|t| &t.1));
                        self.emit(&Instruction::GetArg { nth: 0 });
                        self.emit(&Instruction::GuestDeallocate { size, align });
                    }
                }

                self.realloc = Some(realloc);

                // Perform memory lowering of relevant results, including out pointers as well as traditional results
                match (lower_to_memory, sig.retptr, variant) {
                    // If no lowering to memory is required then we simply
                    // lower the result(s) directly; they'll be returned (or
                    // passed to `task.return`) below.
                    (_lower_to_memory @ false, _, _) => {
                        if let Some(ty) = &func.result {
                            self.lower(ty);
                        }
                    }

                    // Lowering to memory for a guest import
                    //
                    // When a function is imported to a guest this means
                    // it's a host providing the implementation of the
                    // import. The result is stored in the pointer
                    // specified in the last argument, so we get the
                    // pointer here and then write the return value into
                    // it.
                    (
                        _lower_to_memory @ true,
                        _has_ret_ptr @ true,
                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync,
                    ) => {
                        self.emit(&Instruction::GetArg {
                            nth: sig.params.len() - 1,
                        });
                        let ptr = self
                            .stack
                            .pop()
                            .expect("empty stack during result lower to memory");
                        self.write_params_to_memory(&func.result, ptr, Default::default());
                    }

                    // Lowering to memory for a guest export
                    //
                    // For a guest export this is a function defined in
                    // wasm, so we're returning a pointer where the
                    // value was stored at. Allocate some space here
                    // (statically) and then write the result into that
                    // memory, returning the pointer at the end.
                    (_lower_to_memory @ true, _, variant) => match variant {
                        AbiVariant::GuestExport | AbiVariant::GuestExportAsync => {
                            let ElementInfo { size, align } =
                                self.bindgen.sizes().params(&func.result);
                            let ptr = self.bindgen.return_pointer(size, align);
                            self.write_params_to_memory(
                                &func.result,
                                ptr.clone(),
                                Default::default(),
                            );
                            self.stack.push(ptr);
                        }
                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync => {
                            unreachable!(
                                "lowering to memory cannot be performed without a return pointer ({async_note} func [{func_name}], variant {variant:#?})",
                                async_note = async_.then_some("async").unwrap_or("sync"),
                                func_name = func.name,
                            )
                        }
                        AbiVariant::GuestExportAsyncStackful => {
                            todo!("stackful exports are not yet supported")
                        }
                    },
                }

                // Build and emit the appropriate return
                match (variant, async_flat_results) {
                    // Async guest imports always return a i32 status code
                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, None) if async_ => {
                        unreachable!("async guest imports must have a return")
                    }

                    // Async guest imports with results return the status code, not a pointer to any results
                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, Some(results))
                        if async_ =>
                    {
                        let name = &format!("[task-return]{}", func.name);
                        let params = results.as_deref().unwrap_or_default();
                        self.emit(&Instruction::AsyncTaskReturn { name, params });
                    }

                    // All async/non-async cases with results that need to be returned
                    //
                    // In practice, async imports should not end up here, as the returned result of an
                    // async import is *not* a pointer but instead a status code.
                    (_, Some(results)) => {
                        let name = &format!("[task-return]{}", func.name);
                        let params = results.as_deref().unwrap_or(&[WasmType::Pointer]);
                        self.emit(&Instruction::AsyncTaskReturn { name, params });
                    }

                    // All async/non-async cases with no results
                    (_, None) => {
                        if async_ {
                            let name = &format!("[task-return]{}", func.name);
                            self.emit(&Instruction::AsyncTaskReturn {
                                name: name,
                                params: if sig.results.len() > MAX_FLAT_ASYNC_PARAMS {
                                    &[WasmType::Pointer]
                                } else {
                                    &sig.results
                                },
                            });
                        } else {
                            self.emit(&Instruction::Return {
                                func,
                                amt: sig.results.len(),
                            });
                        }
                    }
                }

                self.realloc = None;
            }
        }

        assert!(self.realloc.is_none());

        // Every instruction's operands must have been consumed by now.
        assert!(
            self.stack.is_empty(),
            "stack has {} items remaining: {:?}",
            self.stack.len(),
            self.stack,
        );
    }
1318
1319    fn post_return(&mut self, func: &Function) {
1320        let sig = self.resolve.wasm_signature(AbiVariant::GuestExport, func);
1321
1322        // Currently post-return is only used for lists and lists are always
1323        // returned indirectly through memory due to their flat representation
1324        // having more than one type. Assert that a return pointer is used,
1325        // though, in case this ever changes.
1326        assert!(sig.retptr);
1327
1328        self.emit(&Instruction::GetArg { nth: 0 });
1329        let addr = self.stack.pop().unwrap();
1330
1331        let mut types = Vec::new();
1332        types.extend(func.result);
1333        self.deallocate_in_types(&types, &[addr], true, Deallocate::Lists);
1334
1335        self.emit(&Instruction::Return { func, amt: 0 });
1336    }
1337
1338    fn deallocate_in_types(
1339        &mut self,
1340        types: &[Type],
1341        operands: &[B::Operand],
1342        indirect: bool,
1343        what: Deallocate,
1344    ) {
1345        if indirect {
1346            assert_eq!(operands.len(), 1);
1347            for (offset, ty) in self.bindgen.sizes().field_offsets(types) {
1348                self.deallocate_indirect(ty, operands[0].clone(), offset, what);
1349            }
1350            assert!(
1351                self.stack.is_empty(),
1352                "stack has {} items remaining",
1353                self.stack.len()
1354            );
1355        } else {
1356            let mut operands = operands;
1357            let mut operands_for_ty;
1358            for ty in types {
1359                let types = flat_types(self.resolve, ty, None).unwrap();
1360                (operands_for_ty, operands) = operands.split_at(types.len());
1361                self.stack.extend_from_slice(operands_for_ty);
1362                self.deallocate(ty, what);
1363                assert!(
1364                    self.stack.is_empty(),
1365                    "stack has {} items remaining",
1366                    self.stack.len()
1367                );
1368            }
1369            assert!(operands.is_empty());
1370        }
1371    }
1372
1373    fn emit(&mut self, inst: &Instruction<'_>) {
1374        self.operands.clear();
1375        self.results.clear();
1376
1377        let operands_len = inst.operands_len();
1378        assert!(
1379            self.stack.len() >= operands_len,
1380            "not enough operands on stack for {:?}: have {} need {operands_len}",
1381            inst,
1382            self.stack.len(),
1383        );
1384        self.operands
1385            .extend(self.stack.drain((self.stack.len() - operands_len)..));
1386        self.results.reserve(inst.results_len());
1387
1388        self.bindgen
1389            .emit(self.resolve, inst, &mut self.operands, &mut self.results);
1390
1391        assert_eq!(
1392            self.results.len(),
1393            inst.results_len(),
1394            "{:?} expected {} results, got {}",
1395            inst,
1396            inst.results_len(),
1397            self.results.len()
1398        );
1399        self.stack.append(&mut self.results);
1400    }
1401
    /// Starts a new code block in the underlying `Bindgen`; paired with
    /// `finish_block` below.
    fn push_block(&mut self) {
        self.bindgen.push_block();
    }
1405
1406    fn finish_block(&mut self, size: usize) {
1407        self.operands.clear();
1408        assert!(
1409            size <= self.stack.len(),
1410            "not enough operands on stack for finishing block",
1411        );
1412        self.operands
1413            .extend(self.stack.drain((self.stack.len() - size)..));
1414        self.bindgen.finish_block(&mut self.operands);
1415    }
1416
    /// Lowers a single value of component-model type `ty` from the top of
    /// `self.stack` into its flattened core-wasm representation, emitting
    /// the instructions required to do so.
    ///
    /// This is intentionally the mirror image of `lift` below.
    fn lower(&mut self, ty: &Type) {
        use Instruction::*;

        match *ty {
            // Primitive types each lower to exactly one core value.
            Type::Bool => self.emit(&I32FromBool),
            Type::S8 => self.emit(&I32FromS8),
            Type::U8 => self.emit(&I32FromU8),
            Type::S16 => self.emit(&I32FromS16),
            Type::U16 => self.emit(&I32FromU16),
            Type::S32 => self.emit(&I32FromS32),
            Type::U32 => self.emit(&I32FromU32),
            Type::S64 => self.emit(&I64FromS64),
            Type::U64 => self.emit(&I64FromU64),
            Type::Char => self.emit(&I32FromChar),
            Type::F32 => self.emit(&CoreF32FromF32),
            Type::F64 => self.emit(&CoreF64FromF64),
            Type::String => {
                let realloc = self.list_realloc();
                self.emit(&StringLower { realloc });
            }
            Type::ErrorContext => self.emit(&ErrorContextLower),
            Type::Id(id) => match &self.resolve.types[id].kind {
                // Type aliases delegate to the aliased type.
                TypeDefKind::Type(t) => self.lower(t),
                TypeDefKind::List(element) => {
                    let realloc = self.list_realloc();
                    if self.bindgen.is_list_canonical(self.resolve, element) {
                        // A "canonical" list can be lowered directly as a
                        // pointer/length pair without per-element work.
                        self.emit(&ListCanonLower { element, realloc });
                    } else {
                        // Otherwise emit a per-element block which writes
                        // each element into the list's memory.
                        self.push_block();
                        self.emit(&IterElem { element });
                        self.emit(&IterBasePointer);
                        let addr = self.stack.pop().unwrap();
                        self.write_to_memory(element, addr, Default::default());
                        self.finish_block(0);
                        self.emit(&ListLower { element, realloc });
                    }
                }
                TypeDefKind::Handle(handle) => {
                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
                    self.emit(&HandleLower {
                        handle,
                        ty: id,
                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
                    });
                }
                TypeDefKind::Resource => {
                    todo!();
                }
                // Decompose the record into its fields, then lower each
                // field value in declaration order.
                TypeDefKind::Record(record) => {
                    self.emit(&RecordLower {
                        record,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                    let values = self
                        .stack
                        .drain(self.stack.len() - record.fields.len()..)
                        .collect::<Vec<_>>();
                    for (field, value) in record.fields.iter().zip(values) {
                        self.stack.push(value);
                        self.lower(&field.ty);
                    }
                }
                // Tuples behave like records with positional fields.
                TypeDefKind::Tuple(tuple) => {
                    self.emit(&TupleLower { tuple, ty: id });
                    let values = self
                        .stack
                        .drain(self.stack.len() - tuple.types.len()..)
                        .collect::<Vec<_>>();
                    for (ty, value) in tuple.types.iter().zip(values) {
                        self.stack.push(value);
                        self.lower(ty);
                    }
                }

                TypeDefKind::Flags(flags) => {
                    self.emit(&FlagsLower {
                        flags,
                        ty: id,
                        name: self.resolve.types[id].name.as_ref().unwrap(),
                    });
                }

                // Variant-like types lower each case in its own block; see
                // `lower_variant_arms` for the padding/bitcast details.
                TypeDefKind::Variant(v) => {
                    let results =
                        self.lower_variant_arms(ty, v.cases.iter().map(|c| c.ty.as_ref()));
                    self.emit(&VariantLower {
                        variant: v,
                        ty: id,
                        results: &results,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }
                TypeDefKind::Enum(enum_) => {
                    self.emit(&EnumLower {
                        enum_,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }
                TypeDefKind::Option(t) => {
                    let results = self.lower_variant_arms(ty, [None, Some(t)]);
                    self.emit(&OptionLower {
                        payload: t,
                        ty: id,
                        results: &results,
                    });
                }
                TypeDefKind::Result(r) => {
                    let results = self.lower_variant_arms(ty, [r.ok.as_ref(), r.err.as_ref()]);
                    self.emit(&ResultLower {
                        result: r,
                        ty: id,
                        results: &results,
                    });
                }
                TypeDefKind::Future(ty) => {
                    self.emit(&FutureLower {
                        payload: ty,
                        ty: id,
                    });
                }
                TypeDefKind::Stream(ty) => {
                    self.emit(&StreamLower {
                        payload: ty,
                        ty: id,
                    });
                }
                TypeDefKind::Unknown => unreachable!(),
                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
1550
    /// Lowers each case of a variant-like type (`variant`, `option`,
    /// `result`) in its own block.
    ///
    /// Returns the flattened core types of the whole variant: the first
    /// entry is the discriminant and the rest are the payload slots shared
    /// by all cases. Every block produced here pushes exactly
    /// `results.len()` operands — bitcasting payload values and padding
    /// with zero constants as needed — so that all blocks yield the same
    /// set of core types.
    fn lower_variant_arms<'b>(
        &mut self,
        ty: &Type,
        cases: impl IntoIterator<Item = Option<&'b Type>>,
    ) -> Vec<WasmType> {
        use Instruction::*;
        let results = flat_types(self.resolve, ty, None).unwrap();
        let mut casts = Vec::new();
        for (i, ty) in cases.into_iter().enumerate() {
            self.push_block();
            self.emit(&VariantPayloadName);
            let payload_name = self.stack.pop().unwrap();
            self.emit(&I32Const { val: i as i32 });
            let mut pushed = 1;
            if let Some(ty) = ty {
                // Using the payload of this block we lower the type to
                // raw wasm values.
                self.stack.push(payload_name);
                self.lower(ty);

                // Determine the types of all the wasm values we just
                // pushed, and record how many. If we pushed too few
                // then we'll need to push some zeros after this.
                let temp = flat_types(self.resolve, ty, None).unwrap();
                pushed += temp.len();

                // For all the types pushed we may need to insert some
                // bitcasts. This will go through and cast everything
                // to the right type to ensure all blocks produce the
                // same set of results.
                casts.truncate(0);
                for (actual, expected) in temp.iter().zip(&results[1..]) {
                    casts.push(cast(*actual, *expected));
                }
                if casts.iter().any(|c| *c != Bitcast::None) {
                    self.emit(&Bitcasts { casts: &casts });
                }
            }

            // If we haven't pushed enough items in this block to match
            // what other variants are pushing then we need to push
            // some zeros.
            if pushed < results.len() {
                self.emit(&ConstZero {
                    tys: &results[pushed..],
                });
            }
            self.finish_block(results.len());
        }
        results
    }
1602
1603    fn list_realloc(&self) -> Option<&'static str> {
1604        match self.realloc.expect("realloc should be configured") {
1605            Realloc::None => None,
1606            Realloc::Export(s) => Some(s),
1607        }
1608    }
1609
    /// Lifts flattened core-wasm value(s) from `self.stack` back into a
    /// single component-model value of type `ty`.
    ///
    /// Note that in general everything in this function is the opposite of the
    /// `lower` function above. This is intentional and should be kept this way!
    fn lift(&mut self, ty: &Type) {
        use Instruction::*;

        match *ty {
            // Primitive types each lift from exactly one core value.
            Type::Bool => self.emit(&BoolFromI32),
            Type::S8 => self.emit(&S8FromI32),
            Type::U8 => self.emit(&U8FromI32),
            Type::S16 => self.emit(&S16FromI32),
            Type::U16 => self.emit(&U16FromI32),
            Type::S32 => self.emit(&S32FromI32),
            Type::U32 => self.emit(&U32FromI32),
            Type::S64 => self.emit(&S64FromI64),
            Type::U64 => self.emit(&U64FromI64),
            Type::Char => self.emit(&CharFromI32),
            Type::F32 => self.emit(&F32FromCoreF32),
            Type::F64 => self.emit(&F64FromCoreF64),
            Type::String => self.emit(&StringLift),
            Type::ErrorContext => self.emit(&ErrorContextLift),
            Type::Id(id) => match &self.resolve.types[id].kind {
                // Type aliases delegate to the aliased type.
                TypeDefKind::Type(t) => self.lift(t),
                TypeDefKind::List(element) => {
                    if self.bindgen.is_list_canonical(self.resolve, element) {
                        self.emit(&ListCanonLift { element, ty: id });
                    } else {
                        // Non-canonical lists read each element out of
                        // memory in a per-element block.
                        self.push_block();
                        self.emit(&IterBasePointer);
                        let addr = self.stack.pop().unwrap();
                        self.read_from_memory(element, addr, Default::default());
                        self.finish_block(1);
                        self.emit(&ListLift { element, ty: id });
                    }
                }
                TypeDefKind::Handle(handle) => {
                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
                    self.emit(&HandleLift {
                        handle,
                        ty: id,
                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
                    });
                }
                TypeDefKind::Resource => {
                    todo!();
                }
                // Lift each field from its slice of the flattened operands,
                // then aggregate them into the record.
                TypeDefKind::Record(record) => {
                    self.flat_for_each_record_type(
                        ty,
                        record.fields.iter().map(|f| &f.ty),
                        Self::lift,
                    );
                    self.emit(&RecordLift {
                        record,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }
                TypeDefKind::Tuple(tuple) => {
                    self.flat_for_each_record_type(ty, tuple.types.iter(), Self::lift);
                    self.emit(&TupleLift { tuple, ty: id });
                }
                TypeDefKind::Flags(flags) => {
                    self.emit(&FlagsLift {
                        flags,
                        ty: id,
                        name: self.resolve.types[id].name.as_ref().unwrap(),
                    });
                }

                // Variant-like types lift each case's payload in its own
                // block; see `flat_for_each_variant_arm`.
                TypeDefKind::Variant(v) => {
                    self.flat_for_each_variant_arm(
                        ty,
                        true,
                        v.cases.iter().map(|c| c.ty.as_ref()),
                        Self::lift,
                    );
                    self.emit(&VariantLift {
                        variant: v,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }

                TypeDefKind::Enum(enum_) => {
                    self.emit(&EnumLift {
                        enum_,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }

                TypeDefKind::Option(t) => {
                    self.flat_for_each_variant_arm(ty, true, [None, Some(t)], Self::lift);
                    self.emit(&OptionLift { payload: t, ty: id });
                }

                TypeDefKind::Result(r) => {
                    self.flat_for_each_variant_arm(
                        ty,
                        true,
                        [r.ok.as_ref(), r.err.as_ref()],
                        Self::lift,
                    );
                    self.emit(&ResultLift { result: r, ty: id });
                }

                TypeDefKind::Future(ty) => {
                    self.emit(&FutureLift {
                        payload: ty,
                        ty: id,
                    });
                }
                TypeDefKind::Stream(ty) => {
                    self.emit(&StreamLift {
                        payload: ty,
                        ty: id,
                    });
                }
                TypeDefKind::Unknown => unreachable!(),
                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
1733
1734    fn flat_for_each_record_type<'b>(
1735        &mut self,
1736        container: &Type,
1737        types: impl Iterator<Item = &'b Type>,
1738        mut iter: impl FnMut(&mut Self, &Type),
1739    ) {
1740        let temp = flat_types(self.resolve, container, None).unwrap();
1741        let mut args = self
1742            .stack
1743            .drain(self.stack.len() - temp.len()..)
1744            .collect::<Vec<_>>();
1745        for ty in types {
1746            let temp = flat_types(self.resolve, ty, None).unwrap();
1747            self.stack.extend(args.drain(..temp.len()));
1748            iter(self, ty);
1749        }
1750    }
1751
    /// Inverse of `lower_variant_arms`: with the flattened operands of a
    /// variant-like type on the stack, runs `iter` (e.g. `Self::lift`) on
    /// each case's payload in its own block.
    ///
    /// Only the payload operands (`params.len() - 1` of them) are drained
    /// here; the discriminant operand — the first flat type — is left on
    /// the stack for the subsequent `*Lift` instruction to consume.
    ///
    /// When `blocks_with_type_have_result` is set, each block with a
    /// payload finishes with exactly one result operand; otherwise all
    /// blocks finish with zero.
    fn flat_for_each_variant_arm<'b>(
        &mut self,
        ty: &Type,
        blocks_with_type_have_result: bool,
        cases: impl IntoIterator<Item = Option<&'b Type>>,
        mut iter: impl FnMut(&mut Self, &Type),
    ) {
        let params = flat_types(self.resolve, ty, None).unwrap();
        let mut casts = Vec::new();
        // Drain the payload operands (everything except the discriminant).
        let block_inputs = self
            .stack
            .drain(self.stack.len() + 1 - params.len()..)
            .collect::<Vec<_>>();
        for ty in cases {
            self.push_block();
            if let Some(ty) = ty {
                // Push only the values we need for this variant onto
                // the stack.
                let temp = flat_types(self.resolve, ty, None).unwrap();
                self.stack
                    .extend(block_inputs[..temp.len()].iter().cloned());

                // Cast all the types we have on the stack to the actual
                // types needed for this variant, if necessary.
                casts.truncate(0);
                for (actual, expected) in temp.iter().zip(&params[1..]) {
                    casts.push(cast(*expected, *actual));
                }
                if casts.iter().any(|c| *c != Bitcast::None) {
                    self.emit(&Instruction::Bitcasts { casts: &casts });
                }

                // Then recursively lift this variant's payload.
                iter(self, ty);
            }
            self.finish_block(if blocks_with_type_have_result {
                ty.is_some() as usize
            } else {
                0
            });
        }
    }
1794
    /// Writes the value of type `ty` currently on top of `self.stack` into
    /// linear memory at `addr + offset`, emitting the store instructions
    /// needed. The mirror of `read_from_memory`.
    fn write_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
        use Instruction::*;

        match *ty {
            // Builtin types need different flavors of storage instructions
            // depending on the size of the value written.
            Type::Bool | Type::U8 | Type::S8 => {
                self.lower_and_emit(ty, addr, &I32Store8 { offset })
            }
            Type::U16 | Type::S16 => self.lower_and_emit(ty, addr, &I32Store16 { offset }),
            Type::U32 | Type::S32 | Type::Char => {
                self.lower_and_emit(ty, addr, &I32Store { offset })
            }
            Type::U64 | Type::S64 => self.lower_and_emit(ty, addr, &I64Store { offset }),
            Type::F32 => self.lower_and_emit(ty, addr, &F32Store { offset }),
            Type::F64 => self.lower_and_emit(ty, addr, &F64Store { offset }),
            Type::String => self.write_list_to_memory(ty, addr, offset),
            Type::ErrorContext => self.lower_and_emit(ty, addr, &I32Store { offset }),

            Type::Id(id) => match &self.resolve.types[id].kind {
                TypeDefKind::Type(t) => self.write_to_memory(t, addr, offset),
                TypeDefKind::List(_) => self.write_list_to_memory(ty, addr, offset),

                // Handle-like types lower to a single i32 index.
                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
                    self.lower_and_emit(ty, addr, &I32Store { offset })
                }

                // Decompose the record into its components and then write all
                // the components into memory one-by-one.
                TypeDefKind::Record(record) => {
                    self.emit(&RecordLower {
                        record,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                    self.write_fields_to_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
                }
                TypeDefKind::Resource => {
                    todo!()
                }
                TypeDefKind::Tuple(tuple) => {
                    self.emit(&TupleLower { tuple, ty: id });
                    self.write_fields_to_memory(tuple.types.iter(), addr, offset);
                }

                // Flags are stored according to their representation: one
                // small integer, or `n` consecutive i32 words.
                TypeDefKind::Flags(f) => {
                    self.lower(ty);
                    match f.repr() {
                        FlagsRepr::U8 => {
                            self.stack.push(addr);
                            self.store_intrepr(offset, Int::U8);
                        }
                        FlagsRepr::U16 => {
                            self.stack.push(addr);
                            self.store_intrepr(offset, Int::U16);
                        }
                        FlagsRepr::U32(n) => {
                            // Stored in reverse so stack pops pair up with
                            // ascending memory offsets.
                            for i in (0..n).rev() {
                                self.stack.push(addr.clone());
                                self.emit(&I32Store {
                                    offset: offset.add_bytes(i * 4),
                                });
                            }
                        }
                    }
                }

                // Each case will get its own block, and the first item in each
                // case is writing the discriminant. After that if we have a
                // payload we write the payload after the discriminant, aligned up
                // to the type's alignment.
                TypeDefKind::Variant(v) => {
                    self.write_variant_arms_to_memory(
                        offset,
                        addr,
                        v.tag(),
                        v.cases.iter().map(|c| c.ty.as_ref()),
                    );
                    self.emit(&VariantLower {
                        variant: v,
                        ty: id,
                        results: &[],
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }

                TypeDefKind::Option(t) => {
                    self.write_variant_arms_to_memory(offset, addr, Int::U8, [None, Some(t)]);
                    self.emit(&OptionLower {
                        payload: t,
                        ty: id,
                        results: &[],
                    });
                }

                TypeDefKind::Result(r) => {
                    self.write_variant_arms_to_memory(
                        offset,
                        addr,
                        Int::U8,
                        [r.ok.as_ref(), r.err.as_ref()],
                    );
                    self.emit(&ResultLower {
                        result: r,
                        ty: id,
                        results: &[],
                    });
                }

                TypeDefKind::Enum(e) => {
                    self.lower(ty);
                    self.stack.push(addr);
                    self.store_intrepr(offset, e.tag());
                }

                TypeDefKind::Unknown => unreachable!(),
                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
1915
    /// Writes a function's parameters into memory starting at
    /// `addr + offset`, consuming one stack operand per parameter.
    /// Currently identical to `write_fields_to_memory`.
    fn write_params_to_memory<'b>(
        &mut self,
        params: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
        addr: B::Operand,
        offset: ArchitectureSize,
    ) {
        self.write_fields_to_memory(params, addr, offset);
    }
1924
    /// Emits one block per variant case which stores the discriminant
    /// (`tag`) at `offset` and, when the case has a payload, writes that
    /// payload at the shared payload offset following the tag.
    fn write_variant_arms_to_memory<'b>(
        &mut self,
        offset: ArchitectureSize,
        addr: B::Operand,
        tag: Int,
        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
    ) {
        // All payloads share one offset, computed from the tag size and
        // the cases' alignment requirements.
        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
        for (i, ty) in cases.into_iter().enumerate() {
            self.push_block();
            self.emit(&Instruction::VariantPayloadName);
            let payload_name = self.stack.pop().unwrap();
            // Store the case index as the discriminant.
            self.emit(&Instruction::I32Const { val: i as i32 });
            self.stack.push(addr.clone());
            self.store_intrepr(offset, tag);
            if let Some(ty) = ty {
                self.stack.push(payload_name.clone());
                self.write_to_memory(ty, addr.clone(), payload_offset);
            }
            self.finish_block(0);
        }
    }
1947
    /// Lowers a list (or string) and stores its pointer/length pair into
    /// memory at `addr + offset`.
    fn write_list_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
        // After lowering the list there's two i32 values on the stack
        // which we write into memory, writing the pointer into the low address
        // and the length into the high address.
        self.lower(ty);
        self.stack.push(addr.clone());
        self.emit(&Instruction::LengthStore {
            offset: offset + self.bindgen.sizes().align(ty).into(),
        });
        self.stack.push(addr);
        self.emit(&Instruction::PointerStore { offset });
    }
1960
1961    fn write_fields_to_memory<'b>(
1962        &mut self,
1963        tys: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
1964        addr: B::Operand,
1965        offset: ArchitectureSize,
1966    ) {
1967        let tys = tys.into_iter();
1968        let fields = self
1969            .stack
1970            .drain(self.stack.len() - tys.len()..)
1971            .collect::<Vec<_>>();
1972        for ((field_offset, ty), op) in self
1973            .bindgen
1974            .sizes()
1975            .field_offsets(tys)
1976            .into_iter()
1977            .zip(fields)
1978        {
1979            self.stack.push(op);
1980            self.write_to_memory(ty, addr.clone(), offset + (field_offset));
1981        }
1982    }
1983
    /// Lowers `ty`, pushes the destination `addr`, then emits the store
    /// `instr` — the common pattern for writing a scalar into memory.
    fn lower_and_emit(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
        self.lower(ty);
        self.stack.push(addr);
        self.emit(instr);
    }
1989
    /// Reads a value of type `ty` from linear memory at `addr + offset` and
    /// lifts it onto `self.stack`. The mirror of `write_to_memory`.
    fn read_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
        use Instruction::*;

        match *ty {
            // Scalars use the load instruction matching their size and
            // signedness.
            Type::Bool => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
            Type::U8 => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
            Type::S8 => self.emit_and_lift(ty, addr, &I32Load8S { offset }),
            Type::U16 => self.emit_and_lift(ty, addr, &I32Load16U { offset }),
            Type::S16 => self.emit_and_lift(ty, addr, &I32Load16S { offset }),
            Type::U32 | Type::S32 | Type::Char => self.emit_and_lift(ty, addr, &I32Load { offset }),
            Type::U64 | Type::S64 => self.emit_and_lift(ty, addr, &I64Load { offset }),
            Type::F32 => self.emit_and_lift(ty, addr, &F32Load { offset }),
            Type::F64 => self.emit_and_lift(ty, addr, &F64Load { offset }),
            Type::String => self.read_list_from_memory(ty, addr, offset),
            Type::ErrorContext => self.emit_and_lift(ty, addr, &I32Load { offset }),

            Type::Id(id) => match &self.resolve.types[id].kind {
                TypeDefKind::Type(t) => self.read_from_memory(t, addr, offset),

                TypeDefKind::List(_) => self.read_list_from_memory(ty, addr, offset),

                // Handle-like types are a single i32 index in memory.
                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
                    self.emit_and_lift(ty, addr, &I32Load { offset })
                }

                TypeDefKind::Resource => {
                    todo!();
                }

                // Read and lift each field individually, adjusting the offset
                // as we go along, then aggregate all the fields into the
                // record.
                TypeDefKind::Record(record) => {
                    self.read_fields_from_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
                    self.emit(&RecordLift {
                        record,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }

                TypeDefKind::Tuple(tuple) => {
                    self.read_fields_from_memory(&tuple.types, addr, offset);
                    self.emit(&TupleLift { tuple, ty: id });
                }

                // Flags are loaded according to their representation: one
                // small integer, or `n` consecutive i32 words.
                TypeDefKind::Flags(f) => {
                    match f.repr() {
                        FlagsRepr::U8 => {
                            self.stack.push(addr);
                            self.load_intrepr(offset, Int::U8);
                        }
                        FlagsRepr::U16 => {
                            self.stack.push(addr);
                            self.load_intrepr(offset, Int::U16);
                        }
                        FlagsRepr::U32(n) => {
                            for i in 0..n {
                                self.stack.push(addr.clone());
                                self.emit(&I32Load {
                                    offset: offset.add_bytes(i * 4),
                                });
                            }
                        }
                    }
                    self.lift(ty);
                }

                // Each case will get its own block, and we'll dispatch to the
                // right block based on the `i32.load` we initially perform. Each
                // individual block is pretty simple and just reads the payload type
                // from the corresponding offset if one is available.
                TypeDefKind::Variant(variant) => {
                    self.read_variant_arms_from_memory(
                        offset,
                        addr,
                        variant.tag(),
                        variant.cases.iter().map(|c| c.ty.as_ref()),
                    );
                    self.emit(&VariantLift {
                        variant,
                        ty: id,
                        name: self.resolve.types[id].name.as_deref().unwrap(),
                    });
                }

                TypeDefKind::Option(t) => {
                    self.read_variant_arms_from_memory(offset, addr, Int::U8, [None, Some(t)]);
                    self.emit(&OptionLift { payload: t, ty: id });
                }

                TypeDefKind::Result(r) => {
                    self.read_variant_arms_from_memory(
                        offset,
                        addr,
                        Int::U8,
                        [r.ok.as_ref(), r.err.as_ref()],
                    );
                    self.emit(&ResultLift { result: r, ty: id });
                }

                TypeDefKind::Enum(e) => {
                    self.stack.push(addr.clone());
                    self.load_intrepr(offset, e.tag());
                    self.lift(ty);
                }

                TypeDefKind::Unknown => unreachable!(),
                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
2102
    /// Reads a function's optional result value from memory at
    /// `addr + offset` and lifts it onto the stack. Currently identical to
    /// `read_fields_from_memory` (an `Option<Type>` iterates as zero or one
    /// fields).
    fn read_results_from_memory(
        &mut self,
        result: &Option<Type>,
        addr: B::Operand,
        offset: ArchitectureSize,
    ) {
        self.read_fields_from_memory(result, addr, offset)
    }
2111
    /// Loads a variant's discriminant (`tag`) from `offset`, then emits one
    /// block per case which reads that case's payload — if it has one —
    /// from the shared payload offset.
    fn read_variant_arms_from_memory<'b>(
        &mut self,
        offset: ArchitectureSize,
        addr: B::Operand,
        tag: Int,
        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
    ) {
        self.stack.push(addr.clone());
        self.load_intrepr(offset, tag);
        // All payloads share one offset, computed from the tag size and
        // the cases' alignment requirements.
        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
        for ty in cases {
            self.push_block();
            if let Some(ty) = ty {
                self.read_from_memory(ty, addr.clone(), payload_offset);
            }
            // Blocks with a payload yield one lifted value; empty cases
            // yield none.
            self.finish_block(ty.is_some() as usize);
        }
    }
2130
    /// Reads a list's (or string's) pointer/length pair from memory at
    /// `addr + offset` and lifts it onto the stack.
    fn read_list_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
        // Read the pointer/len and then perform the standard lifting
        // process.
        self.stack.push(addr.clone());
        self.emit(&Instruction::PointerLoad { offset });
        self.stack.push(addr);
        self.emit(&Instruction::LengthLoad {
            offset: offset + self.bindgen.sizes().align(ty).into(),
        });
        self.lift(ty);
    }
2142
2143    fn read_fields_from_memory<'b>(
2144        &mut self,
2145        tys: impl IntoIterator<Item = &'b Type>,
2146        addr: B::Operand,
2147        offset: ArchitectureSize,
2148    ) {
2149        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys).iter() {
2150            self.read_from_memory(ty, addr.clone(), offset + (*field_offset));
2151        }
2152    }
2153
    /// Pushes the source `addr`, emits the load `instr`, then lifts the
    /// loaded value as `ty` — the mirror of `lower_and_emit`.
    fn emit_and_lift(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
        self.stack.push(addr);
        self.emit(instr);
        self.lift(ty);
    }
2159
2160    fn load_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2161        self.emit(&match repr {
2162            Int::U64 => Instruction::I64Load { offset },
2163            Int::U32 => Instruction::I32Load { offset },
2164            Int::U16 => Instruction::I32Load16U { offset },
2165            Int::U8 => Instruction::I32Load8U { offset },
2166        });
2167    }
2168
2169    fn store_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2170        self.emit(&match repr {
2171            Int::U64 => Instruction::I64Store { offset },
2172            Int::U32 => Instruction::I32Store { offset },
2173            Int::U16 => Instruction::I32Store16 { offset },
2174            Int::U8 => Instruction::I32Store8 { offset },
2175        });
2176    }
2177
    /// Runs the deallocation of `ty` for the operands currently on
    /// `self.stack`.
    ///
    /// This will pop the ABI items of `ty` from `self.stack`.
    ///
    /// The `what` selector controls which categories of items are
    /// released — e.g. whether owned handles/futures/streams are dropped
    /// in addition to guest memory.
    fn deallocate(&mut self, ty: &Type, what: Deallocate) {
        use Instruction::*;

        match *ty {
            // Strings carry a guest allocation; emit the dedicated
            // instruction to free it.
            Type::String => {
                self.emit(&Instruction::GuestDeallocateString);
            }

            Type::Bool
            | Type::U8
            | Type::S8
            | Type::U16
            | Type::S16
            | Type::U32
            | Type::S32
            | Type::Char
            | Type::U64
            | Type::S64
            | Type::F32
            | Type::F64
            | Type::ErrorContext => {
                // No deallocation necessary, just discard the operand on the
                // stack.
                self.stack.pop().unwrap();
            }

            Type::Id(id) => match &self.resolve.types[id].kind {
                // Type aliases delegate to the underlying type.
                TypeDefKind::Type(t) => self.deallocate(t, what),

                TypeDefKind::List(element) => {
                    // Emit a block that deallocates one element, rooted at
                    // the per-iteration base pointer, ...
                    self.push_block();
                    self.emit(&IterBasePointer);
                    let elemaddr = self.stack.pop().unwrap();
                    self.deallocate_indirect(element, elemaddr, Default::default(), what);
                    self.finish_block(0);

                    // ... then free the list's own backing storage.
                    self.emit(&Instruction::GuestDeallocateList { element });
                }

                // Owned handles, futures, and streams are dropped only when
                // `what` requests handle cleanup: lift the raw value back
                // into a handle and emit its drop.
                TypeDefKind::Handle(Handle::Own(_))
                | TypeDefKind::Future(_)
                | TypeDefKind::Stream(_)
                    if what.handles() =>
                {
                    self.lift(ty);
                    self.emit(&DropHandle { ty });
                }

                // Aggregates deallocate each member's flattened operands in
                // turn.
                TypeDefKind::Record(record) => {
                    self.flat_for_each_record_type(
                        ty,
                        record.fields.iter().map(|f| &f.ty),
                        |me, ty| me.deallocate(ty, what),
                    );
                }

                TypeDefKind::Tuple(tuple) => {
                    self.flat_for_each_record_type(ty, tuple.types.iter(), |me, ty| {
                        me.deallocate(ty, what)
                    });
                }

                // Variant-like types emit one block per case; only the
                // active case's payload gets deallocated at runtime.
                TypeDefKind::Variant(variant) => {
                    self.flat_for_each_variant_arm(
                        ty,
                        false,
                        variant.cases.iter().map(|c| c.ty.as_ref()),
                        |me, ty| me.deallocate(ty, what),
                    );
                    self.emit(&GuestDeallocateVariant {
                        blocks: variant.cases.len(),
                    });
                }

                // `option<T>` is modeled as a two-case variant: none/some.
                TypeDefKind::Option(t) => {
                    self.flat_for_each_variant_arm(ty, false, [None, Some(t)], |me, ty| {
                        me.deallocate(ty, what)
                    });
                    self.emit(&GuestDeallocateVariant { blocks: 2 });
                }

                // `result<T, E>` likewise: ok/err.
                TypeDefKind::Result(e) => {
                    self.flat_for_each_variant_arm(
                        ty,
                        false,
                        [e.ok.as_ref(), e.err.as_ref()],
                        |me, ty| me.deallocate(ty, what),
                    );
                    self.emit(&GuestDeallocateVariant { blocks: 2 });
                }

                // discard the operand on the stack, otherwise nothing to free.
                // (Own handles/futures/streams land here when `what`
                // excludes handle cleanup.)
                TypeDefKind::Flags(_)
                | TypeDefKind::Enum(_)
                | TypeDefKind::Future(_)
                | TypeDefKind::Stream(_)
                | TypeDefKind::Handle(Handle::Own(_))
                | TypeDefKind::Handle(Handle::Borrow(_)) => {
                    self.stack.pop().unwrap();
                }

                TypeDefKind::Resource => unreachable!(),
                TypeDefKind::Unknown => unreachable!(),

                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
2290
    /// Deallocates a value of type `ty` stored in linear memory at
    /// `addr + offset`, as opposed to living in operands on `self.stack`.
    ///
    /// Loads whatever parts of the value are needed (pointer/length
    /// pairs, handles, discriminants) from memory and then delegates to
    /// `deallocate` or recurses for nested types.
    fn deallocate_indirect(
        &mut self,
        ty: &Type,
        addr: B::Operand,
        offset: ArchitectureSize,
        what: Deallocate,
    ) {
        use Instruction::*;

        // No need to execute any instructions if this type itself doesn't
        // require any form of post-return.
        if !needs_deallocate(self.resolve, ty, what) {
            return;
        }

        match *ty {
            // Strings: load the pointer/length pair, then run the
            // stack-based deallocation on it.
            Type::String => {
                self.stack.push(addr.clone());
                self.emit(&Instruction::PointerLoad { offset });
                self.stack.push(addr);
                self.emit(&Instruction::LengthLoad {
                    offset: offset + self.bindgen.sizes().align(ty).into(),
                });
                self.deallocate(ty, what);
            }

            // Scalars own nothing in memory; nothing to emit.
            Type::Bool
            | Type::U8
            | Type::S8
            | Type::U16
            | Type::S16
            | Type::U32
            | Type::S32
            | Type::Char
            | Type::U64
            | Type::S64
            | Type::F32
            | Type::F64
            | Type::ErrorContext => {}

            Type::Id(id) => match &self.resolve.types[id].kind {
                // Type aliases delegate to the underlying type.
                TypeDefKind::Type(t) => self.deallocate_indirect(t, addr, offset, what),

                // Lists: load the pointer/length pair and reuse the
                // stack-based list deallocation (elements + storage).
                TypeDefKind::List(_) => {
                    self.stack.push(addr.clone());
                    self.emit(&Instruction::PointerLoad { offset });
                    self.stack.push(addr);
                    self.emit(&Instruction::LengthLoad {
                        offset: offset + self.bindgen.sizes().align(ty).into(),
                    });

                    self.deallocate(ty, what);
                }

                // Owned handles/futures/streams: read the raw handle value
                // from memory and drop it, when `what` requests handle
                // cleanup.
                TypeDefKind::Handle(Handle::Own(_))
                | TypeDefKind::Future(_)
                | TypeDefKind::Stream(_)
                    if what.handles() =>
                {
                    self.read_from_memory(ty, addr, offset);
                    self.emit(&DropHandle { ty });
                }

                // NOTE(review): presumably these are ruled out by the
                // `needs_deallocate` early-return above — confirm.
                TypeDefKind::Handle(Handle::Own(_)) => unreachable!(),
                TypeDefKind::Handle(Handle::Borrow(_)) => unreachable!(),
                TypeDefKind::Resource => unreachable!(),

                // Aggregates recurse into each field at its layout offset.
                TypeDefKind::Record(record) => {
                    self.deallocate_indirect_fields(
                        &record.fields.iter().map(|f| f.ty).collect::<Vec<_>>(),
                        addr,
                        offset,
                        what,
                    );
                }

                TypeDefKind::Tuple(tuple) => {
                    self.deallocate_indirect_fields(&tuple.types, addr, offset, what);
                }

                TypeDefKind::Flags(_) => {}

                // Variant-like types: load the tag and emit one
                // deallocation block per case.
                TypeDefKind::Variant(variant) => {
                    self.deallocate_indirect_variant(
                        offset,
                        addr,
                        variant.tag(),
                        variant.cases.iter().map(|c| c.ty.as_ref()),
                        what,
                    );
                    self.emit(&GuestDeallocateVariant {
                        blocks: variant.cases.len(),
                    });
                }

                TypeDefKind::Option(t) => {
                    self.deallocate_indirect_variant(offset, addr, Int::U8, [None, Some(t)], what);
                    self.emit(&GuestDeallocateVariant { blocks: 2 });
                }

                TypeDefKind::Result(e) => {
                    self.deallocate_indirect_variant(
                        offset,
                        addr,
                        Int::U8,
                        [e.ok.as_ref(), e.err.as_ref()],
                        what,
                    );
                    self.emit(&GuestDeallocateVariant { blocks: 2 });
                }

                TypeDefKind::Enum(_) => {}

                TypeDefKind::Future(_) => unreachable!(),
                TypeDefKind::Stream(_) => unreachable!(),
                TypeDefKind::Unknown => unreachable!(),
                TypeDefKind::FixedSizeList(..) => todo!(),
            },
        }
    }
2411
    /// Deallocates a variant-like value stored in linear memory at
    /// `addr + offset`.
    ///
    /// Loads the discriminant (leaving it on the stack for the caller's
    /// subsequent `GuestDeallocateVariant` instruction) and emits one
    /// block per case; each block deallocates that case's payload, all of
    /// which live at a shared payload offset past the tag.
    fn deallocate_indirect_variant<'b>(
        &mut self,
        offset: ArchitectureSize,
        addr: B::Operand,
        tag: Int,
        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
        what: Deallocate,
    ) {
        // Load the tag; it stays on the stack for the caller to consume.
        self.stack.push(addr.clone());
        self.load_intrepr(offset, tag);
        // Every case's payload starts at the same offset after the tag.
        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
        for ty in cases {
            self.push_block();
            if let Some(ty) = ty {
                self.deallocate_indirect(ty, addr.clone(), payload_offset, what);
            }
            self.finish_block(0);
        }
    }
2431
2432    fn deallocate_indirect_fields(
2433        &mut self,
2434        tys: &[Type],
2435        addr: B::Operand,
2436        offset: ArchitectureSize,
2437        what: Deallocate,
2438    ) {
2439        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys) {
2440            self.deallocate_indirect(ty, addr.clone(), offset + (field_offset), what);
2441        }
2442    }
2443}
2444
2445fn cast(from: WasmType, to: WasmType) -> Bitcast {
2446    use WasmType::*;
2447
2448    match (from, to) {
2449        (I32, I32)
2450        | (I64, I64)
2451        | (F32, F32)
2452        | (F64, F64)
2453        | (Pointer, Pointer)
2454        | (PointerOrI64, PointerOrI64)
2455        | (Length, Length) => Bitcast::None,
2456
2457        (I32, I64) => Bitcast::I32ToI64,
2458        (F32, I32) => Bitcast::F32ToI32,
2459        (F64, I64) => Bitcast::F64ToI64,
2460
2461        (I64, I32) => Bitcast::I64ToI32,
2462        (I32, F32) => Bitcast::I32ToF32,
2463        (I64, F64) => Bitcast::I64ToF64,
2464
2465        (F32, I64) => Bitcast::F32ToI64,
2466        (I64, F32) => Bitcast::I64ToF32,
2467
2468        (I64, PointerOrI64) => Bitcast::I64ToP64,
2469        (Pointer, PointerOrI64) => Bitcast::PToP64,
2470        (_, PointerOrI64) => {
2471            Bitcast::Sequence(Box::new([cast(from, I64), cast(I64, PointerOrI64)]))
2472        }
2473
2474        (PointerOrI64, I64) => Bitcast::P64ToI64,
2475        (PointerOrI64, Pointer) => Bitcast::P64ToP,
2476        (PointerOrI64, _) => Bitcast::Sequence(Box::new([cast(PointerOrI64, I64), cast(I64, to)])),
2477
2478        (I32, Pointer) => Bitcast::I32ToP,
2479        (Pointer, I32) => Bitcast::PToI32,
2480        (I32, Length) => Bitcast::I32ToL,
2481        (Length, I32) => Bitcast::LToI32,
2482        (I64, Length) => Bitcast::I64ToL,
2483        (Length, I64) => Bitcast::LToI64,
2484        (Pointer, Length) => Bitcast::PToL,
2485        (Length, Pointer) => Bitcast::LToP,
2486
2487        (F32, Pointer | Length) => Bitcast::Sequence(Box::new([cast(F32, I32), cast(I32, to)])),
2488        (Pointer | Length, F32) => Bitcast::Sequence(Box::new([cast(from, I32), cast(I32, F32)])),
2489
2490        (F32, F64)
2491        | (F64, F32)
2492        | (F64, I32)
2493        | (I32, F64)
2494        | (Pointer | Length, I64 | F64)
2495        | (I64 | F64, Pointer | Length) => {
2496            unreachable!("Don't know how to bitcast from {:?} to {:?}", from, to);
2497        }
2498    }
2499}
2500
2501/// Flatten types in a given type
2502///
2503/// It is sometimes necessary to restrict the number of max parameters dynamically,
2504/// for example during an async guest import call (flat params are limited to 4)
2505fn flat_types(resolve: &Resolve, ty: &Type, max_params: Option<usize>) -> Option<Vec<WasmType>> {
2506    let max_params = max_params.unwrap_or(MAX_FLAT_PARAMS);
2507    let mut storage = iter::repeat_n(WasmType::I32, max_params).collect::<Vec<_>>();
2508    let mut flat = FlatTypes::new(storage.as_mut_slice());
2509    resolve.push_flat(ty, &mut flat).then_some(flat.to_vec())
2510}