wit_bindgen_core/
abi.rs

1use std::fmt;
2use std::iter;
3
4pub use wit_parser::abi::{AbiVariant, FlatTypes, WasmSignature, WasmType};
5use wit_parser::{
6    align_to_arch, Alignment, ArchitectureSize, ElementInfo, Enum, Flags, FlagsRepr, Function,
7    Handle, Int, Record, Resolve, Result_, SizeAlign, Tuple, Type, TypeDefKind, TypeId, Variant,
8};
9
10// Helper macro for defining instructions without having to have tons of
11// exhaustive `match` statements to update
12macro_rules! def_instruction {
13    (
14        $( #[$enum_attr:meta] )*
15        pub enum $name:ident<'a> {
16            $(
17                $( #[$attr:meta] )*
18                $variant:ident $( {
19                    $($field:ident : $field_ty:ty $(,)* )*
20                } )?
21                    :
22                [$num_popped:expr] => [$num_pushed:expr],
23            )*
24        }
25    ) => {
26        $( #[$enum_attr] )*
27        pub enum $name<'a> {
28            $(
29                $( #[$attr] )*
30                $variant $( {
31                    $(
32                        $field : $field_ty,
33                    )*
34                } )? ,
35            )*
36        }
37
38        impl $name<'_> {
39            /// How many operands does this instruction pop from the stack?
40            #[allow(unused_variables)]
41            pub fn operands_len(&self) -> usize {
42                match self {
43                    $(
44                        Self::$variant $( {
45                            $(
46                                $field,
47                            )*
48                        } )? => $num_popped,
49                    )*
50                }
51            }
52
53            /// How many results does this instruction push onto the stack?
54            #[allow(unused_variables)]
55            pub fn results_len(&self) -> usize {
56                match self {
57                    $(
58                        Self::$variant $( {
59                            $(
60                                $field,
61                            )*
62                        } )? => $num_pushed,
63                    )*
64                }
65            }
66        }
67    };
68}
69
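// As a brief illustration of what the macro above generates (a sketch of the
// expansion, not an exhaustive one): an entry such as
//
//     I32Store { offset: ArchitectureSize } : [2] => [0],
//
// becomes an `Instruction::I32Store { offset: ArchitectureSize }` variant whose
// `operands_len()` returns 2 (the pointer and value it pops) and whose
// `results_len()` returns 0 (it pushes nothing).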
70def_instruction! {
71    #[derive(Debug)]
72    pub enum Instruction<'a> {
73        /// Acquires the specified parameter and places it on the stack.
74        /// Depending on the context this may refer to wasm parameters or
75        /// interface types parameters.
76        GetArg { nth: usize } : [0] => [1],
77
78        // Integer const/manipulation instructions
79
80        /// Pushes the constant `val` onto the stack.
81        I32Const { val: i32 } : [0] => [1],
82        /// Casts the top N items on the stack using the `Bitcast` enum
83        /// provided. Consumes the same number of operands as it produces.
84        Bitcasts { casts: &'a [Bitcast] } : [casts.len()] => [casts.len()],
85        /// Pushes a constant zero onto the stack for each wasm type in `tys`.
86        ConstZero { tys: &'a [WasmType] } : [0] => [tys.len()],
87
88        // Memory load/store instructions
89
90        /// Pops a pointer from the stack and loads a little-endian `i32` from
91        /// it, using the specified constant offset.
92        I32Load { offset: ArchitectureSize } : [1] => [1],
93        /// Pops a pointer from the stack and loads a little-endian `i8` from
94        /// it, using the specified constant offset. The value loaded is then
95        /// zero-extended to 32 bits.
96        I32Load8U { offset: ArchitectureSize } : [1] => [1],
97        /// Pops a pointer from the stack and loads a little-endian `i8` from
98        /// it, using the specified constant offset. The value loaded is then
99        /// sign-extended to 32 bits.
100        I32Load8S { offset: ArchitectureSize } : [1] => [1],
101        /// Pops a pointer from the stack and loads a little-endian `i16` from
102        /// it, using the specified constant offset. The value loaded is then
103        /// zero-extended to 32 bits.
104        I32Load16U { offset: ArchitectureSize } : [1] => [1],
105        /// Pops a pointer from the stack and loads a little-endian `i16` from
106        /// it, using the specified constant offset. The value loaded is then
107        /// sign-extended to 32 bits.
108        I32Load16S { offset: ArchitectureSize } : [1] => [1],
109        /// Pops a pointer from the stack and loads a little-endian `i64` from
110        /// it, using the specified constant offset.
111        I64Load { offset: ArchitectureSize } : [1] => [1],
112        /// Pops a pointer from the stack and loads a little-endian `f32` from
113        /// it, using the specified constant offset.
114        F32Load { offset: ArchitectureSize } : [1] => [1],
115        /// Pops a pointer from the stack and loads a little-endian `f64` from
116        /// it, using the specified constant offset.
117        F64Load { offset: ArchitectureSize } : [1] => [1],
118
119        /// Like `I32Load` or `I64Load`, but for loading pointer values.
120        PointerLoad { offset: ArchitectureSize } : [1] => [1],
121        /// Like `I32Load` or `I64Load`, but for loading array length values.
122        LengthLoad { offset: ArchitectureSize } : [1] => [1],
123
124        /// Pops a pointer from the stack and then an `i32` value.
125        /// Stores the value in little-endian at the pointer specified plus the
126        /// constant `offset`.
127        I32Store { offset: ArchitectureSize } : [2] => [0],
128        /// Pops a pointer from the stack and then an `i32` value.
129        /// Stores the low 8 bits of the value in little-endian at the pointer
130        /// specified plus the constant `offset`.
131        I32Store8 { offset: ArchitectureSize } : [2] => [0],
132        /// Pops a pointer from the stack and then an `i32` value.
133        /// Stores the low 16 bits of the value in little-endian at the pointer
134        /// specified plus the constant `offset`.
135        I32Store16 { offset: ArchitectureSize } : [2] => [0],
136        /// Pops a pointer from the stack and then an `i64` value.
137        /// Stores the value in little-endian at the pointer specified plus the
138        /// constant `offset`.
139        I64Store { offset: ArchitectureSize } : [2] => [0],
140        /// Pops a pointer from the stack and then an `f32` value.
141        /// Stores the value in little-endian at the pointer specified plus the
142        /// constant `offset`.
143        F32Store { offset: ArchitectureSize } : [2] => [0],
144        /// Pops a pointer from the stack and then an `f64` value.
145        /// Stores the value in little-endian at the pointer specified plus the
146        /// constant `offset`.
147        F64Store { offset: ArchitectureSize } : [2] => [0],
148
149        /// Like `I32Store` or `I64Store`, but for storing pointer values.
150        PointerStore { offset: ArchitectureSize } : [2] => [0],
151        /// Like `I32Store` or `I64Store`, but for storing array length values.
152        LengthStore { offset: ArchitectureSize } : [2] => [0],
153
154        // Scalar lifting/lowering
155
156        /// Converts an interface type `char` value to a 32-bit integer
157        /// representing the unicode scalar value.
158        I32FromChar : [1] => [1],
159        /// Converts an interface type `u64` value to a wasm `i64`.
160        I64FromU64 : [1] => [1],
161        /// Converts an interface type `s64` value to a wasm `i64`.
162        I64FromS64 : [1] => [1],
163        /// Converts an interface type `u32` value to a wasm `i32`.
164        I32FromU32 : [1] => [1],
165        /// Converts an interface type `s32` value to a wasm `i32`.
166        I32FromS32 : [1] => [1],
167        /// Converts an interface type `u16` value to a wasm `i32`.
168        I32FromU16 : [1] => [1],
169        /// Converts an interface type `s16` value to a wasm `i32`.
170        I32FromS16 : [1] => [1],
171        /// Converts an interface type `u8` value to a wasm `i32`.
172        I32FromU8 : [1] => [1],
173        /// Converts an interface type `s8` value to a wasm `i32`.
174        I32FromS8 : [1] => [1],
175        /// Converts an interface type `f32` value to a wasm `f32`.
176        ///
177        /// This may be a noop for some implementations, but it's here in case the
178        /// native language representation of `f32` is different than the wasm
179        /// representation of `f32`.
180        CoreF32FromF32 : [1] => [1],
181        /// Converts an interface type `f64` value to a wasm `f64`.
182        ///
183        /// This may be a noop for some implementations, but it's here in case the
184        /// native language representation of `f64` is different than the wasm
185        /// representation of `f64`.
186        CoreF64FromF64 : [1] => [1],
187
188        /// Converts a native wasm `i32` to an interface type `s8`.
189        ///
190        /// This will truncate the upper bits of the `i32`.
191        S8FromI32 : [1] => [1],
192        /// Converts a native wasm `i32` to an interface type `u8`.
193        ///
194        /// This will truncate the upper bits of the `i32`.
195        U8FromI32 : [1] => [1],
196        /// Converts a native wasm `i32` to an interface type `s16`.
197        ///
198        /// This will truncate the upper bits of the `i32`.
199        S16FromI32 : [1] => [1],
200        /// Converts a native wasm `i32` to an interface type `u16`.
201        ///
202        /// This will truncate the upper bits of the `i32`.
203        U16FromI32 : [1] => [1],
204        /// Converts a native wasm `i32` to an interface type `s32`.
205        S32FromI32 : [1] => [1],
206        /// Converts a native wasm `i32` to an interface type `u32`.
207        U32FromI32 : [1] => [1],
208        /// Converts a native wasm `i64` to an interface type `s64`.
209        S64FromI64 : [1] => [1],
210        /// Converts a native wasm `i64` to an interface type `u64`.
211        U64FromI64 : [1] => [1],
212        /// Converts a native wasm `i32` to an interface type `char`.
213        ///
214        /// It's safe to assume that the `i32` is indeed a valid unicode code point.
215        CharFromI32 : [1] => [1],
216        /// Converts a native wasm `f32` to an interface type `f32`.
217        F32FromCoreF32 : [1] => [1],
218        /// Converts a native wasm `f64` to an interface type `f64`.
219        F64FromCoreF64 : [1] => [1],
220
221        /// Creates a `bool` from an `i32` input, trapping if the `i32` isn't
222        /// zero or one.
223        BoolFromI32 : [1] => [1],
224        /// Creates an `i32` from a `bool` input; the result must be 0 or 1.
225        I32FromBool : [1] => [1],
226
227        // lists
228
229        /// Lowers a list where the element's layout in the native language is
230        /// expected to match the canonical ABI definition of interface types.
231        ///
232        /// Pops a list value from the stack and pushes the pointer/length onto
233        /// the stack. If `realloc` is set to `Some` then this is expected to
234        /// *consume* the list which means that the data needs to be copied. An
235        /// allocation/copy is expected when:
236        ///
237        /// * A host is calling a wasm export with a list (it needs to copy the
238        ///   list in to the callee's module, allocating space with `realloc`)
239        /// * A wasm export is returning a list (it's expected to use `realloc`
240        ///   to give ownership of the list to the caller).
241        /// * A host is returning a list from an import definition (meaning that
242        ///   space needs to be allocated in the caller with `realloc`).
243        ///
244        /// A copy does not happen (e.g. `realloc` is `None`) when:
245        ///
246        /// * A wasm module calls an import with the list. In this situation
247        ///   it's expected the caller will know how to access this module's
248        ///   memory (e.g. the host has raw access or wasm-to-wasm communication
249        ///   would copy the list).
250        ///
251        /// If `realloc` is `Some` then the adapter is not responsible for
252        /// cleaning up this list because the other end is receiving the
253        /// allocation. If `realloc` is `None` then the adapter is responsible
254        /// for cleaning up any temporary allocation it created, if any.
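        ///
        /// As a concrete illustration of the cases above: a host calling a
        /// guest export with a `list<u8>` argument lowers with `realloc` set
        /// (copying the bytes into the callee's memory), while a guest module
        /// lowering the same list to pass to one of its own imports uses a
        /// `realloc` of `None` and performs no copy.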
255        ListCanonLower {
256            element: &'a Type,
257            realloc: Option<&'a str>,
258        } : [1] => [2],
259
260        /// Same as `ListCanonLower`, but used for strings
261        StringLower {
262            realloc: Option<&'a str>,
263        } : [1] => [2],
264
265        /// Lowers a list where the element's layout in the native language is
266        /// not expected to match the canonical ABI definition of interface
267        /// types.
268        ///
269        /// Pops a list value from the stack and pushes the pointer/length onto
270        /// the stack. This operation also pops a block from the block stack
271        /// which is used as the iteration body of writing each element of the
272        /// list consumed.
273        ///
274        /// The `realloc` field here behaves the same way as `ListCanonLower`.
275        /// It's only set to `None` when a wasm module calls a declared import.
276        /// Otherwise lowering in other contexts requires allocating memory for
277        /// the receiver to own.
278        ListLower {
279            element: &'a Type,
280            realloc: Option<&'a str>,
281        } : [1] => [2],
282
283        /// Lifts a list which has a canonical representation into an interface
284        /// types value.
285        ///
286        /// The term "canonical" representation here means that the
287        /// representation of the interface types value in the native language
288        /// exactly matches the canonical ABI definition of the type.
289        ///
290        /// This will consume two `i32` values from the stack, a pointer and a
291        /// length, and then produces an interface value list.
292        ListCanonLift {
293            element: &'a Type,
294            ty: TypeId,
295        } : [2] => [1],
296
297        /// Same as `ListCanonLift`, but used for strings
298        StringLift : [2] => [1],
299
300        /// Lifts a list into an interface types value.
301        ///
302        /// This will consume two `i32` values from the stack, a pointer and a
303        /// length, and then produces an interface value list.
304        ///
305        /// This will also pop a block from the block stack which is how to
306        /// read each individual element from the list.
307        ListLift {
308            element: &'a Type,
309            ty: TypeId,
310        } : [2] => [1],
311
312        /// Pushes an operand onto the stack representing the list item from
313        /// each iteration of the list.
314        ///
315        /// This is only used inside of blocks related to lowering lists.
316        IterElem { element: &'a Type } : [0] => [1],
317
318        /// Pushes an operand onto the stack representing the base pointer of
319        /// the next element in a list.
320        ///
321        /// This is used for both lifting and lowering lists.
322        IterBasePointer : [0] => [1],
323
324        // records and tuples
325
326        /// Pops a record value off the stack, decomposes the record to all of
327        /// its fields, and then pushes the fields onto the stack.
328        RecordLower {
329            record: &'a Record,
330            name: &'a str,
331            ty: TypeId,
332        } : [1] => [record.fields.len()],
333
334        /// Pops all fields for a record off the stack and then composes them
335        /// into a record.
336        RecordLift {
337            record: &'a Record,
338            name: &'a str,
339            ty: TypeId,
340        } : [record.fields.len()] => [1],
341
342        /// Create an `i32` from a handle.
343        HandleLower {
344            handle: &'a Handle,
345            name: &'a str,
346            ty: TypeId,
347        } : [1] => [1],
348
349        /// Create a handle from an `i32`.
350        HandleLift {
351            handle: &'a Handle,
352            name: &'a str,
353            ty: TypeId,
354        } : [1] => [1],
355
356        /// Create an `i32` from a future.
357        FutureLower {
358            payload: &'a Option<Type>,
359            ty: TypeId,
360        } : [1] => [1],
361
362        /// Create a future from an `i32`.
363        FutureLift {
364            payload: &'a Option<Type>,
365            ty: TypeId,
366        } : [1] => [1],
367
368        /// Create an `i32` from a stream.
369        StreamLower {
370            payload: &'a Option<Type>,
371            ty: TypeId,
372        } : [1] => [1],
373
374        /// Create a stream from an `i32`.
375        StreamLift {
376            payload: &'a Option<Type>,
377            ty: TypeId,
378        } : [1] => [1],
379
380        /// Create an `i32` from an error-context.
381        ErrorContextLower : [1] => [1],
382
383        /// Create an error-context from an `i32`.
384        ErrorContextLift : [1] => [1],
385
386        /// Pops a tuple value off the stack, decomposes the tuple to all of
387        /// its fields, and then pushes the fields onto the stack.
388        TupleLower {
389            tuple: &'a Tuple,
390            ty: TypeId,
391        } : [1] => [tuple.types.len()],
392
393        /// Pops all fields for a tuple off the stack and then composes them
394        /// into a tuple.
395        TupleLift {
396            tuple: &'a Tuple,
397            ty: TypeId,
398        } : [tuple.types.len()] => [1],
399
400        /// Converts a language-specific record-of-bools to a list of `i32`.
401        FlagsLower {
402            flags: &'a Flags,
403            name: &'a str,
404            ty: TypeId,
405        } : [1] => [flags.repr().count()],
406        /// Converts a list of native wasm `i32` to a language-specific
407        /// record-of-bools.
408        FlagsLift {
409            flags: &'a Flags,
410            name: &'a str,
411            ty: TypeId,
412        } : [flags.repr().count()] => [1],
413
414        // variants
415
416        /// This is a special instruction used by the `VariantLower`
417        /// instruction to determine the name of the payload, if present, to use
418        /// within each block.
419        ///
420        /// Each sub-block will have this be the first instruction, and if it
421        /// lowers a payload it will expect something bound to this name.
422        VariantPayloadName : [0] => [1],
423
424        /// Pops a variant off the stack as well as `ty.cases.len()` blocks
425        /// from the code generator. Uses each of those blocks and the value
426        /// from the stack to produce `results.len()` items.
427        VariantLower {
428            variant: &'a Variant,
429            name: &'a str,
430            ty: TypeId,
431            results: &'a [WasmType],
432        } : [1] => [results.len()],
433
434        /// Pops an `i32` off the stack as well as `ty.cases.len()` blocks
435        /// from the code generator. Uses each of those blocks and the value
436        /// from the stack to produce a final variant.
437        VariantLift {
438            variant: &'a Variant,
439            name: &'a str,
440            ty: TypeId,
441        } : [1] => [1],
442
443        /// Pops an enum off the stack and pushes the `i32` representation.
444        EnumLower {
445            enum_: &'a Enum,
446            name: &'a str,
447            ty: TypeId,
448        } : [1] => [1],
449
450        /// Pops an `i32` off the stack and lifts it into the `enum` specified.
451        EnumLift {
452            enum_: &'a Enum,
453            name: &'a str,
454            ty: TypeId,
455        } : [1] => [1],
456
457        /// Specialization of `VariantLower` specifically for `option<T>` types;
458        /// otherwise behaves the same as `VariantLower` (e.g. two blocks for
459        /// the two cases).
460        OptionLower {
461            payload: &'a Type,
462            ty: TypeId,
463            results: &'a [WasmType],
464        } : [1] => [results.len()],
465
466        /// Specialization of `VariantLift` specifically for the `option<T>`
467        /// type. Otherwise behaves the same as the `VariantLift` instruction
468        /// with two blocks for the lift.
469        OptionLift {
470            payload: &'a Type,
471            ty: TypeId,
472        } : [1] => [1],
473
474        /// Specialization of `VariantLower` specifically for `result<T, E>`
475        /// types; otherwise behaves the same as `VariantLower` (e.g. two blocks
476        /// for the two cases).
477        ResultLower {
478            result: &'a Result_,
479            ty: TypeId,
480            results: &'a [WasmType],
481        } : [1] => [results.len()],
482
483        /// Specialization of `VariantLift` specifically for the `result<T,
484        /// E>` type. Otherwise behaves the same as the `VariantLift`
485        /// instruction with two blocks for the lift.
486        ResultLift {
487            result: &'a Result_,
488            ty: TypeId,
489        } : [1] => [1],
490
491        // calling/control flow
492
493        /// Represents a call to a raw WebAssembly API. The module/name are
494        /// provided inline as well as the types if necessary.
495        CallWasm {
496            name: &'a str,
497            sig: &'a WasmSignature,
498        } : [sig.params.len()] => [sig.results.len()],
499
500        /// Same as `CallWasm`, except that an interface-level function is being
501        /// called rather than a raw wasm function.
502        ///
503        /// Note that this will be used for async functions, and `async_`
504        /// indicates whether the function should be invoked in an async
505        /// fashion.
506        CallInterface {
507            func: &'a Function,
508            async_: bool,
509        } : [func.params.len()] => [usize::from(func.result.is_some())],
510
511        /// Returns `amt` values on the stack. This is always the last
512        /// instruction.
513        Return { amt: usize, func: &'a Function } : [*amt] => [0],
514
515        /// Calls the `realloc` function specified in a malloc-like fashion
516        /// allocating `size` bytes with alignment `align`.
517        ///
518        /// Pushes the returned pointer onto the stack.
519        Malloc {
520            realloc: &'static str,
521            size: ArchitectureSize,
522            align: Alignment,
523        } : [0] => [1],
524
525        /// Used exclusively for guest-code generation, this indicates that
526        /// the standard memory deallocation function needs to be invoked with
527        /// the specified parameters.
528        ///
529        /// This will pop a pointer from the stack and push nothing.
530        GuestDeallocate {
531            size: ArchitectureSize,
532            align: Alignment,
533        } : [1] => [0],
534
535        /// Used exclusively for guest-code generation, this indicates that
536        /// a string is being deallocated. The ptr/length are on the stack and
537        /// are popped off and used to deallocate the string.
538        GuestDeallocateString : [2] => [0],
539
540        /// Used exclusively for guest-code generation, this indicates that
541        /// a list is being deallocated. The ptr/length are on the stack and
542        /// are popped off and used to deallocate the list.
543        ///
544        /// This variant also pops a block off the block stack to be used as the
545        /// body of the deallocation loop.
546        GuestDeallocateList {
547            element: &'a Type,
548        } : [2] => [0],
549
550        /// Used exclusively for guest-code generation, this indicates that
551        /// a variant is being deallocated. The integer discriminant is popped
552        /// off the stack, as well as `blocks` blocks from the block
553        /// stack. The discriminant is used to select, at runtime, which of
554        /// the blocks is executed to deallocate the variant.
555        GuestDeallocateVariant {
556            blocks: usize,
557        } : [1] => [0],
558
559        /// Deallocates the language-specific handle representation on the top
560        /// of the stack. Used for async imports.
561        DropHandle { ty: &'a Type } : [1] => [0],
562
563        /// Call `task.return` for an async-lifted export.
564        ///
565        /// This will call core wasm import `name` which will be mapped to
566        /// `task.return` later on. The function given has `params` as its
567        /// parameters and it will return no results. This is used to pass the
568        /// lowered representation of a function's results to `task.return`.
569        AsyncTaskReturn { name: &'a str, params: &'a [WasmType] } : [params.len()] => [0],
570
571        /// Force the evaluation of the specified number of expressions and push
572        /// the results to the stack.
573        ///
574        /// This is useful prior to disposing of temporary variables and/or
575        /// allocations which are referenced by one or more not-yet-evaluated
576        /// expressions.
577        Flush { amt: usize } : [*amt] => [*amt],
578    }
579}
580
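/// A cast between core wasm value representations.
///
/// These are used by the `Bitcasts` instruction when, for example, the
/// payloads of different variant cases are joined into a single flat core
/// type (such as an `f32` payload carried in an `i32` slot), or when pointer
/// and length values are stored in wider integer slots.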
581#[derive(Debug, PartialEq)]
582pub enum Bitcast {
583    // Upcasts
584    F32ToI32,
585    F64ToI64,
586    I32ToI64,
587    F32ToI64,
588
589    // Downcasts
590    I32ToF32,
591    I64ToF64,
592    I64ToI32,
593    I64ToF32,
594
595    // PointerOrI64 conversions. These preserve provenance when the source
596    // or destination is a pointer value.
597    //
598    // These are used when pointer values are being stored in
599    // (ToP64) and loaded out of (P64To) PointerOrI64 values, so they
600    // always have to preserve provenance when the value being loaded or
601    // stored is a pointer.
602    P64ToI64,
603    I64ToP64,
604    P64ToP,
605    PToP64,
606
607    // Pointer<->number conversions. These do not preserve provenance.
608    //
609    // These are used when integer or floating-point values are being stored in
610    // (I32ToP/etc.) and loaded out of (PToI32/etc.) pointer values, so they
611    // never have any provenance to preserve.
612    I32ToP,
613    PToI32,
614    PToL,
615    LToP,
616
617    // Number<->Number conversions.
618    I32ToL,
619    LToI32,
620    I64ToL,
621    LToI64,
622
623    // Multiple conversions in sequence.
624    Sequence(Box<[Bitcast; 2]>),
625
626    None,
627}
628
629/// Whether the glue code surrounding a call is lifting arguments and lowering
630/// results or vice versa.
631#[derive(Clone, Copy, PartialEq, Eq)]
632pub enum LiftLower {
633    /// When the glue code lifts arguments and lowers results.
634    ///
635    /// ```text
636    /// Wasm --lift-args--> SourceLanguage; call; SourceLanguage --lower-results--> Wasm
637    /// ```
638    LiftArgsLowerResults,
639    /// When the glue code lowers arguments and lifts results.
640    ///
641    /// ```text
642    /// SourceLanguage --lower-args--> Wasm; call; Wasm --lift-results--> SourceLanguage
643    /// ```
644    LowerArgsLiftResults,
645}
646
647/// Trait for language implementors to use to generate glue code between native
648/// WebAssembly signatures and interface types signatures.
649///
650/// This is used as an implementation detail in interpreting the ABI between
651/// interface types and wasm types. Eventually this will be driven by interface
652/// types adapters themselves, but for now the ABI of a function dictates what
653/// instructions are fed in.
654///
655/// Types implementing `Bindgen` are incrementally fed `Instruction` values to
656/// generate code for. Instructions operate like a stack machine where each
657/// instruction has a list of inputs and a list of outputs (provided by the
658/// `emit` function).
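///
/// For example, a hypothetical generator whose `Operand` is `String` might
/// handle a couple of simple instructions inside its `emit` method like this
/// (a sketch only, not part of this trait):
///
/// ```ignore
/// match inst {
///     // Pops nothing, pushes one result.
///     Instruction::I32Const { val } => results.push(val.to_string()),
///     // Pops one operand, pushes one result.
///     Instruction::I32FromU32 => results.push(format!("({}) as i32", operands[0])),
///     _ => unimplemented!(),
/// }
/// ```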
659pub trait Bindgen {
660    /// The intermediate type for fragments of code produced by this generator.
661    ///
662    /// For most languages `String` is a suitable intermediate type.
663    type Operand: Clone + fmt::Debug;
664
665    /// Emit code to implement the given instruction.
666    ///
667    /// Each operand is given in `operands` and can be popped off if ownership
668    /// is required. It's guaranteed that `operands` has the appropriate length
669    /// for the `inst` given, as specified with [`Instruction`].
670    ///
671    /// Each result variable should be pushed onto `results`. This function must
672    /// push the appropriate number of results or binding generation will panic.
673    fn emit(
674        &mut self,
675        resolve: &Resolve,
676        inst: &Instruction<'_>,
677        operands: &mut Vec<Self::Operand>,
678        results: &mut Vec<Self::Operand>,
679    );
680
681    /// Gets an operand reference to the return pointer area.
682    ///
683    /// The provided size and alignment are for the function's return type.
684    fn return_pointer(&mut self, size: ArchitectureSize, align: Alignment) -> Self::Operand;
685
686    /// Enters a new block of code to generate code for.
687    ///
688    /// This is currently exclusively used for constructing variants. When a
689    /// variant is constructed a block here will be pushed for each case of a
690    /// variant, generating the code necessary to translate a variant case.
691    ///
692    /// Blocks are completed with `finish_block` below. It's expected that `emit`
693    /// will always push code (if necessary) into the "current block", which is
694    /// updated by calling this method and `finish_block` below.
695    fn push_block(&mut self);
696
697    /// Indicates to the code generator that a block is completed, and the
698    /// `operand` values specified are the resulting values of the block.
699    ///
700    /// This method will be used to compute the value of each arm of lifting a
701    /// variant. `operand` will be empty if the variant case didn't
702    /// actually have any type associated with it. Otherwise it will contain
703    /// the last value remaining on the stack, representing the value
704    /// associated with a variant's `case`.
705    ///
706    /// It's expected that this will resume code generation in the previous
707    /// block before `push_block` was called. This must also save the results
708    /// of the current block internally for instructions like `ResultLift` to
709    /// use later.
710    fn finish_block(&mut self, operand: &mut Vec<Self::Operand>);
711
712    /// Returns size information that was previously calculated for all types.
713    fn sizes(&self) -> &SizeAlign;
714
715    /// Returns whether or not the specified element type is represented in a
716    /// "canonical" form for lists. This dictates whether the `ListCanonLower`
717    /// and `ListCanonLift` instructions are used or not.
718    fn is_list_canonical(&self, resolve: &Resolve, element: &Type) -> bool;
719}
720
721/// Generates an abstract sequence of instructions which represents this
722/// function being adapted as an imported function.
723///
724/// The instructions here, when executed, will emulate a language with
725/// interface types calling the concrete wasm implementation. The parameters
726/// for the returned instruction sequence are the language's own
727/// interface-types parameters. One instruction in the instruction stream
728    /// will be a `CallWasm` which represents calling the actual raw wasm function
729/// signature.
730///
731/// This function is useful, for example, if you're building a language
732/// generator for WASI bindings. This will document how to translate
733/// language-specific values into the wasm types to call a WASI function,
734/// and it will also automatically convert the results of the WASI function
735/// back to a language-specific value.
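///
/// A guest-side generator lowering a call to an imported function might use
/// this as follows (a sketch; `my_bindgen` stands in for some [`Bindgen`]
/// implementation):
///
/// ```ignore
/// call(
///     &resolve,
///     AbiVariant::GuestImport,
///     LiftLower::LowerArgsLiftResults,
///     &func,
///     &mut my_bindgen,
///     false, // not an async import
/// );
/// ```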
736pub fn call(
737    resolve: &Resolve,
738    variant: AbiVariant,
739    lift_lower: LiftLower,
740    func: &Function,
741    bindgen: &mut impl Bindgen,
742    async_: bool,
743) {
744    Generator::new(resolve, bindgen).call(func, variant, lift_lower, async_);
745}
746
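/// Lowers `value`, which has interface type `ty`, directly into linear memory
/// at `address` by emitting the appropriate store instructions into `bindgen`.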
747pub fn lower_to_memory<B: Bindgen>(
748    resolve: &Resolve,
749    bindgen: &mut B,
750    address: B::Operand,
751    value: B::Operand,
752    ty: &Type,
753) {
754    let mut generator = Generator::new(resolve, bindgen);
755    // TODO: make this configurable? Right now this function is only called for
756    // future/stream callbacks so it's appropriate to skip realloc here as it's
757    // all "lower for wasm import", but this might get reused for something else
758    // in the future.
759    generator.realloc = Some(Realloc::Export("cabi_realloc"));
760    generator.stack.push(value);
761    generator.write_to_memory(ty, address, Default::default());
762}
763
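/// Lowers `value`, which has interface type `ty`, into its flattened core wasm
/// representation, returning one operand per flattened core value.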
764pub fn lower_flat<B: Bindgen>(
765    resolve: &Resolve,
766    bindgen: &mut B,
767    value: B::Operand,
768    ty: &Type,
769) -> Vec<B::Operand> {
770    let mut generator = Generator::new(resolve, bindgen);
771    generator.stack.push(value);
772    generator.realloc = Some(Realloc::Export("cabi_realloc"));
773    generator.lower(ty);
774    generator.stack
775}
776
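/// Lifts a value of interface type `ty` from linear memory at `address`,
/// returning the lifted operand.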
777pub fn lift_from_memory<B: Bindgen>(
778    resolve: &Resolve,
779    bindgen: &mut B,
780    address: B::Operand,
781    ty: &Type,
782) -> B::Operand {
783    let mut generator = Generator::new(resolve, bindgen);
784    generator.read_from_memory(ty, address, Default::default());
785    generator.stack.pop().unwrap()
786}
787
788/// Used in a similar manner as the [`call`] function above, except it is
789/// used to generate the `post-return` callback for `func`.
790///
791/// This is only intended to be used in guest generators for exported
792/// functions and will primarily generate `GuestDeallocate*` instructions,
793/// plus others used as input to those instructions.
794pub fn post_return(resolve: &Resolve, func: &Function, bindgen: &mut impl Bindgen) {
795    Generator::new(resolve, bindgen).post_return(func);
796}
797
798/// Returns whether the `Function` specified needs a post-return function to
799/// be generated in guest code.
800///
801/// This is primarily the case when the return value contains a memory
802/// allocation such as a list or a string.
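///
/// For example, an export returning a `list<u8>` or `string` needs a
/// post-return function to free the returned allocation, while an export
/// returning `u32` does not.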
803pub fn guest_export_needs_post_return(resolve: &Resolve, func: &Function) -> bool {
804    func.result
805        .map(|t| needs_deallocate(resolve, &t, Deallocate::Lists))
806        .unwrap_or(false)
807}
808
809fn needs_deallocate(resolve: &Resolve, ty: &Type, what: Deallocate) -> bool {
810    match ty {
811        Type::String => true,
812        Type::ErrorContext => true,
813        Type::Id(id) => match &resolve.types[*id].kind {
814            TypeDefKind::List(_) => true,
815            TypeDefKind::Type(t) => needs_deallocate(resolve, t, what),
816            TypeDefKind::Handle(Handle::Own(_)) => what.handles(),
817            TypeDefKind::Handle(Handle::Borrow(_)) => false,
818            TypeDefKind::Resource => false,
819            TypeDefKind::Record(r) => r
820                .fields
821                .iter()
822                .any(|f| needs_deallocate(resolve, &f.ty, what)),
823            TypeDefKind::Tuple(t) => t.types.iter().any(|t| needs_deallocate(resolve, t, what)),
824            TypeDefKind::Variant(t) => t
825                .cases
826                .iter()
827                .filter_map(|t| t.ty.as_ref())
828                .any(|t| needs_deallocate(resolve, t, what)),
829            TypeDefKind::Option(t) => needs_deallocate(resolve, t, what),
830            TypeDefKind::Result(t) => [&t.ok, &t.err]
831                .iter()
832                .filter_map(|t| t.as_ref())
833                .any(|t| needs_deallocate(resolve, t, what)),
834            TypeDefKind::Flags(_) | TypeDefKind::Enum(_) => false,
835            TypeDefKind::Future(_) | TypeDefKind::Stream(_) => what.handles(),
836            TypeDefKind::Unknown => unreachable!(),
837            TypeDefKind::FixedSizeList(..) => todo!(),
838        },
839
840        Type::Bool
841        | Type::U8
842        | Type::S8
843        | Type::U16
844        | Type::S16
845        | Type::U32
846        | Type::S32
847        | Type::U64
848        | Type::S64
849        | Type::F32
850        | Type::F64
851        | Type::Char => false,
852    }
853}
854
855/// Generate instructions in `bindgen` to deallocate all lists in the given
856/// `operands`, which either hold the flat values of `types` or, if `indirect`, a pointer to them in linear memory.
857pub fn deallocate_lists_in_types<B: Bindgen>(
858    resolve: &Resolve,
859    types: &[Type],
860    operands: &[B::Operand],
861    indirect: bool,
862    bindgen: &mut B,
863) {
864    Generator::new(resolve, bindgen).deallocate_in_types(
865        types,
866        operands,
867        indirect,
868        Deallocate::Lists,
869    );
870}
871
872/// Like [`deallocate_lists_in_types`], but also deallocates owned resources
873/// such as `own<T>` handles, futures, and streams.
874pub fn deallocate_lists_and_own_in_types<B: Bindgen>(
875    resolve: &Resolve,
876    types: &[Type],
877    operands: &[B::Operand],
878    indirect: bool,
879    bindgen: &mut B,
880) {
881    Generator::new(resolve, bindgen).deallocate_in_types(
882        types,
883        operands,
884        indirect,
885        Deallocate::ListsAndOwn,
886    );
887}
888
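/// How memory should be allocated when lowering values that need space in
/// linear memory (lists, strings, and indirect parameters or results).
///
/// `None` means no allocation (and no copy) is performed, while `Export` names
/// the exported realloc function (typically `cabi_realloc`) used to allocate
/// memory that the receiver will own.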
889#[derive(Copy, Clone)]
890pub enum Realloc {
891    None,
892    Export(&'static str),
893}
894
895/// What to deallocate in various `deallocate_*` methods.
896#[derive(Copy, Clone)]
897enum Deallocate {
898    /// Only deallocate lists.
899    Lists,
900    /// Deallocate lists and owned resources such as `own<T>` and
901    /// futures/streams.
902    ListsAndOwn,
903}
904
905impl Deallocate {
906    fn handles(&self) -> bool {
907        match self {
908            Deallocate::Lists => false,
909            Deallocate::ListsAndOwn => true,
910        }
911    }
912}
913
914struct Generator<'a, B: Bindgen> {
915    bindgen: &'a mut B,
916    resolve: &'a Resolve,
917    operands: Vec<B::Operand>,
918    results: Vec<B::Operand>,
919    stack: Vec<B::Operand>,
920    return_pointer: Option<B::Operand>,
921    realloc: Option<Realloc>,
922}
923
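// Flattening limits from the component-model canonical ABI: signatures with
// more flattened parameters than this spill their arguments to linear memory
// instead. Async imports use the smaller limit below.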
924const MAX_FLAT_PARAMS: usize = 16;
925const MAX_FLAT_ASYNC_PARAMS: usize = 4;
926
927impl<'a, B: Bindgen> Generator<'a, B> {
928    fn new(resolve: &'a Resolve, bindgen: &'a mut B) -> Generator<'a, B> {
929        Generator {
930            resolve,
931            bindgen,
932            operands: Vec::new(),
933            results: Vec::new(),
934            stack: Vec::new(),
935            return_pointer: None,
936            realloc: None,
937        }
938    }
939
940    fn call(&mut self, func: &Function, variant: AbiVariant, lift_lower: LiftLower, async_: bool) {
941        let sig = self.resolve.wasm_signature(variant, func);
942
943        // Lowering parameters when calling a wasm import _or_ returning a result
944        // from an async-lifted wasm export means we don't need to pass
945        // ownership, but we pass ownership in all other cases.
946        let realloc = match (variant, lift_lower, async_) {
947            (AbiVariant::GuestImport, LiftLower::LowerArgsLiftResults, _)
948            | (
949                AbiVariant::GuestExport
950                | AbiVariant::GuestExportAsync
951                | AbiVariant::GuestExportAsyncStackful,
952                LiftLower::LiftArgsLowerResults,
953                true,
954            ) => Realloc::None,
955            _ => Realloc::Export("cabi_realloc"),
956        };
957        assert!(self.realloc.is_none());
958
959        match lift_lower {
960            LiftLower::LowerArgsLiftResults => {
961                assert!(!async_, "generators should not be using this for async");
962
963                self.realloc = Some(realloc);
964                if let (AbiVariant::GuestExport, true) = (variant, async_) {
965                    unimplemented!("host-side code generation for async lift/lower not supported");
966                }
967
968                let lower_to_memory = |self_: &mut Self, ptr: B::Operand| {
969                    let mut offset = ArchitectureSize::default();
970                    for (nth, (_, ty)) in func.params.iter().enumerate() {
971                        self_.emit(&Instruction::GetArg { nth });
972                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
973                        self_.write_to_memory(ty, ptr.clone(), offset);
974                        offset += self_.bindgen.sizes().size(ty);
975                    }
976
977                    self_.stack.push(ptr);
978                };
979
980                if !sig.indirect_params {
981                    // If the parameters for this function aren't indirect
982                    // (there aren't too many) then we simply do a normal lower
983                    // operation for them all.
984                    for (nth, (_, ty)) in func.params.iter().enumerate() {
985                        self.emit(&Instruction::GetArg { nth });
986                        self.lower(ty);
987                    }
988                } else {
989                    // ... otherwise if parameters are indirect space is
990                    // allocated for them and each argument is lowered
991                    // individually into memory.
992                    let ElementInfo { size, align } = self
993                        .bindgen
994                        .sizes()
995                        .record(func.params.iter().map(|t| &t.1));
996                    let ptr = match variant {
997                        // When a wasm module calls an import it will provide
998                        // space that isn't explicitly deallocated.
999                        AbiVariant::GuestImport => self.bindgen.return_pointer(size, align),
1000                        // When calling a wasm module from the outside, though,
1001                        // malloc needs to be called.
1002                        AbiVariant::GuestExport => {
1003                            self.emit(&Instruction::Malloc {
1004                                realloc: "cabi_realloc",
1005                                size,
1006                                align,
1007                            });
1008                            self.stack.pop().unwrap()
1009                        }
1010                        AbiVariant::GuestImportAsync
1011                        | AbiVariant::GuestExportAsync
1012                        | AbiVariant::GuestExportAsyncStackful => {
1013                            unreachable!()
1014                        }
1015                    };
1016                    lower_to_memory(self, ptr);
1017                }
1018                self.realloc = None;
1019
1020                // If necessary we may need to prepare a return pointer for
1021                // this ABI.
1022                if variant == AbiVariant::GuestImport && sig.retptr {
1023                    let info = self.bindgen.sizes().params(&func.result);
1024                    let ptr = self.bindgen.return_pointer(info.size, info.align);
1025                    self.return_pointer = Some(ptr.clone());
1026                    self.stack.push(ptr);
1027                }
1028
1029                assert_eq!(self.stack.len(), sig.params.len());
1030                self.emit(&Instruction::CallWasm {
1031                    name: &func.name,
1032                    sig: &sig,
1033                });
1034
1035                if !sig.retptr {
1036                    // With no return pointer in use we can simply lift the
1037                    // result(s) of the function from the result of the core
1038                    // wasm function.
1039                    if let Some(ty) = &func.result {
1040                        self.lift(ty)
1041                    }
1042                } else {
1043                    let ptr = match variant {
1044                        // imports into guests means it's a wasm module
1045                        // calling an imported function. We supplied the
1046                        // return pointer as the last argument (saved in
1047                        // `self.return_pointer`) so we use that to read
1048                        // the result of the function from memory.
1049                        AbiVariant::GuestImport => {
1050                            assert!(sig.results.is_empty());
1051                            self.return_pointer.take().unwrap()
1052                        }
1053
1054                        // guest exports means that this is a host
1055                        // calling wasm so wasm returned a pointer to where
1056                        // the result is stored
1057                        AbiVariant::GuestExport => self.stack.pop().unwrap(),
1058
1059                        AbiVariant::GuestImportAsync
1060                        | AbiVariant::GuestExportAsync
1061                        | AbiVariant::GuestExportAsyncStackful => {
1062                            unreachable!()
1063                        }
1064                    };
1065
1066                    self.read_results_from_memory(
1067                        &func.result,
1068                        ptr.clone(),
1069                        ArchitectureSize::default(),
1070                    );
1071                    self.emit(&Instruction::Flush {
1072                        amt: usize::from(func.result.is_some()),
1073                    });
1074                }
1075
1076                self.emit(&Instruction::Return {
1077                    func,
1078                    amt: usize::from(func.result.is_some()),
1079                });
1080            }
1081
1082            LiftLower::LiftArgsLowerResults => {
1083                let max_flat_params = match (variant, async_) {
1084                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, _is_async @ true) => {
1085                        MAX_FLAT_ASYNC_PARAMS
1086                    }
1087                    _ => MAX_FLAT_PARAMS,
1088                };
1089
1090                // Read parameters from memory
1091                let read_from_memory = |self_: &mut Self| {
1092                    let mut offset = ArchitectureSize::default();
1093                    let ptr = self_
1094                        .stack
1095                        .pop()
1096                        .expect("empty stack during read param from memory");
1097                    for (_, ty) in func.params.iter() {
1098                        offset = align_to_arch(offset, self_.bindgen.sizes().align(ty));
1099                        self_.read_from_memory(ty, ptr.clone(), offset);
1100                        offset += self_.bindgen.sizes().size(ty);
1101                    }
1102                };
1103
1104                // Resolve parameters
1105                if sig.indirect_params {
1106                    // If parameters were passed indirectly, arguments must be
1107                    // read in succession from memory, with the pointer to the arguments
1108                    // being the first argument to the function.
1109                    self.emit(&Instruction::GetArg { nth: 0 });
1110                    read_from_memory(self);
1111                } else {
1112                    // ... otherwise, if parameters were passed directly then we lift each
1113                    // argument in succession from the core wasm types that
1114                    // make up the type.
1115                    let mut offset = 0;
1116                    for (_, ty) in func.params.iter() {
1117                        let types = flat_types(self.resolve, ty, Some(max_flat_params))
1118                            .expect(&format!("direct parameter load failed to produce types during generation of fn call (func name: '{}')", func.name));
1119                        for _ in 0..types.len() {
1120                            self.emit(&Instruction::GetArg { nth: offset });
1121                            offset += 1;
1122                        }
1123                        self.lift(ty);
1124                    }
1125                }
1126
1127                // ... and that allows us to call the interface types function
1128                self.emit(&Instruction::CallInterface { func, async_ });
1129
1130                // The return value of an async function is *not* the result of the function
1131                // itself or a pointer but rather a status code.
1132                //
1133                // Asynchronous functions will call `task.return` after the
1134                // interface function completes, so lowering is conditional
1135                // based on slightly different logic for the `task.return`
1136                // intrinsic.
1137                //
1138                // Note that in the async import case the code below deals with the CM function being lowered,
1139                // not the core function that is underneath it (i.e. func.result may be `None`,
1140                // while the associated core function underneath must have an i32 status code result)
1141                let (lower_to_memory, async_flat_results) = match (async_, &func.result) {
1142                    // All async cases pass along the function results and flatten where necessary
1143                    (_is_async @ true, func_result) => {
1144                        let results = match &func_result {
1145                            Some(ty) => flat_types(self.resolve, ty, Some(max_flat_params)),
1146                            None => Some(Vec::new()),
1147                        };
1148                        (results.is_none(), Some(results))
1149                    }
1150                    // All other non-async cases
1151                    (_is_async @ false, _) => (sig.retptr, None),
1152                };
1153
1154                // This was dynamically allocated by the caller (or async start
1155                // function) so after it's been read by the guest we need to
1156                // deallocate it.
1157                if let AbiVariant::GuestExport
1158                | AbiVariant::GuestExportAsync
1159                | AbiVariant::GuestExportAsyncStackful = variant
1160                {
1161                    if sig.indirect_params && !async_ {
1162                        let ElementInfo { size, align } = self
1163                            .bindgen
1164                            .sizes()
1165                            .record(func.params.iter().map(|t| &t.1));
1166                        self.emit(&Instruction::GetArg { nth: 0 });
1167                        self.emit(&Instruction::GuestDeallocate { size, align });
1168                    }
1169                }
1170
1171                self.realloc = Some(realloc);
1172
1173                // Perform memory lowering of relevant results, including out pointers as well as traditional results
1174                match (lower_to_memory, sig.retptr, variant) {
1175                    // If no lowering to memory is required then we simply lower the
1176                    // result(s) and return them directly from the function.
1177                    (_lower_to_memory @ false, _, _) => {
1178                        if let Some(ty) = &func.result {
1179                            self.lower(ty);
1180                        }
1181                    }
1182
1183                    // Lowering to memory for a guest import
1184                    //
1185                    // When a function is imported to a guest this means
1186                    // it's a host providing the implementation of the
1187                    // import. The result is stored in the pointer
1188                    // specified in the last argument, so we get the
1189                    // pointer here and then write the return value into
1190                    // it.
1191                    (
1192                        _lower_to_memory @ true,
1193                        _has_ret_ptr @ true,
1194                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync,
1195                    ) => {
1196                        self.emit(&Instruction::GetArg {
1197                            nth: sig.params.len() - 1,
1198                        });
1199                        let ptr = self
1200                            .stack
1201                            .pop()
1202                            .expect("empty stack during result lower to memory");
1203                        self.write_params_to_memory(&func.result, ptr, Default::default());
1204                    }
1205
1206                    // Lowering to memory for a guest export
1207                    //
1208                    // For a guest export this is a function defined in
1209                    // wasm, so we're returning a pointer where the
1210                    // value was stored at. Allocate some space here
1211                    // (statically) and then write the result into that
1212                    // memory, returning the pointer at the end.
1213                    (_lower_to_memory @ true, _, variant) => match variant {
1214                        AbiVariant::GuestExport | AbiVariant::GuestExportAsync => {
1215                            let ElementInfo { size, align } =
1216                                self.bindgen.sizes().params(&func.result);
1217                            let ptr = self.bindgen.return_pointer(size, align);
1218                            self.write_params_to_memory(
1219                                &func.result,
1220                                ptr.clone(),
1221                                Default::default(),
1222                            );
1223                            self.stack.push(ptr);
1224                        }
1225                        AbiVariant::GuestImport | AbiVariant::GuestImportAsync => {
1226                            unreachable!(
1227                                "lowering to memory cannot be performed without a return pointer ({async_note} func [{func_name}], variant {variant:#?})",
1228                                async_note = async_.then_some("async").unwrap_or("sync"),
1229                                func_name = func.name,
1230                            )
1231                        }
1232                        AbiVariant::GuestExportAsyncStackful => {
1233                            todo!("stackful exports are not yet supported")
1234                        }
1235                    },
1236                }
1237
1238                // Build and emit the appropriate return
1239                match (variant, async_flat_results) {
1240                    // Async guest imports always return an i32 status code
1241                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, None) if async_ => {
1242                        unreachable!("async guest imports must have a return")
1243                    }
1244
1245                    // Async guest imports with results return the status code, not a pointer to any results
1246                    (AbiVariant::GuestImport | AbiVariant::GuestImportAsync, Some(results))
1247                        if async_ =>
1248                    {
1249                        let name = &format!("[task-return]{}", func.name);
1250                        let params = results.as_deref().unwrap_or_default();
1251                        self.emit(&Instruction::AsyncTaskReturn { name, params });
1252                    }
1253
1254                    // Remaining async cases with results return them via `task.return`, either as
1255                    // flattened values or, if flattening failed, as a pointer to the written results.
1256                    // In practice, async imports should not end up here, as the returned result of an
1257                    // async import is *not* a pointer but instead a status code.
1258                    (_, Some(results)) => {
1259                        let name = &format!("[task-return]{}", func.name);
1260                        let params = results.as_deref().unwrap_or(&[WasmType::Pointer]);
1261                        self.emit(&Instruction::AsyncTaskReturn { name, params });
1262                    }
1263
1264                    // All remaining (non-async) cases return directly with however many results the core signature has.
1265                    //
1266                    // In practice, an async import will never get here (it always has a result, the status code)
1267                    (_, None) => {
1268                        self.emit(&Instruction::Return {
1269                            func,
1270                            amt: sig.results.len(),
1271                        });
1272                    }
1273                }
1274
1275                self.realloc = None;
1276            }
1277        }
1278
1279        assert!(self.realloc.is_none());
1280
1281        assert!(
1282            self.stack.is_empty(),
1283            "stack has {} items remaining: {:?}",
1284            self.stack.len(),
1285            self.stack,
1286        );
1287    }
1288
1289    fn post_return(&mut self, func: &Function) {
1290        let sig = self.resolve.wasm_signature(AbiVariant::GuestExport, func);
1291
1292        // Currently post-return is only used for lists and lists are always
1293        // returned indirectly through memory due to their flat representation
1294        // having more than one type. Assert that a return pointer is used,
1295        // though, in case this ever changes.
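        // For example, an export returning `list<u32>` has a two-value flat
        // representation (pointer and length), so its result is written
        // through a return pointer; this post-return then walks that result
        // and frees the list allocations.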
1296        assert!(sig.retptr);
1297
1298        self.emit(&Instruction::GetArg { nth: 0 });
1299        let addr = self.stack.pop().unwrap();
1300
1301        let mut types = Vec::new();
1302        types.extend(func.result);
1303        self.deallocate_in_types(&types, &[addr], true, Deallocate::Lists);
1304
1305        self.emit(&Instruction::Return { func, amt: 0 });
1306    }
1307
1308    fn deallocate_in_types(
1309        &mut self,
1310        types: &[Type],
1311        operands: &[B::Operand],
1312        indirect: bool,
1313        what: Deallocate,
1314    ) {
1315        if indirect {
1316            assert_eq!(operands.len(), 1);
1317            for (offset, ty) in self.bindgen.sizes().field_offsets(types) {
1318                self.deallocate_indirect(ty, operands[0].clone(), offset, what);
1319            }
1320            assert!(
1321                self.stack.is_empty(),
1322                "stack has {} items remaining",
1323                self.stack.len()
1324            );
1325        } else {
1326            let mut operands = operands;
1327            let mut operands_for_ty;
1328            for ty in types {
1329                let types = flat_types(self.resolve, ty, None).unwrap();
1330                (operands_for_ty, operands) = operands.split_at(types.len());
1331                self.stack.extend_from_slice(operands_for_ty);
1332                self.deallocate(ty, what);
1333                assert!(
1334                    self.stack.is_empty(),
1335                    "stack has {} items remaining",
1336                    self.stack.len()
1337                );
1338            }
1339            assert!(operands.is_empty());
1340        }
1341    }
1342
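    /// Pops `inst.operands_len()` operands off `self.stack`, asks the
    /// backend to emit `inst` with them, and pushes the `inst.results_len()`
    /// results produced back onto the stack.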
1343    fn emit(&mut self, inst: &Instruction<'_>) {
1344        self.operands.clear();
1345        self.results.clear();
1346
1347        let operands_len = inst.operands_len();
1348        assert!(
1349            self.stack.len() >= operands_len,
1350            "not enough operands on stack for {:?}: have {} need {operands_len}",
1351            inst,
1352            self.stack.len(),
1353        );
1354        self.operands
1355            .extend(self.stack.drain((self.stack.len() - operands_len)..));
1356        self.results.reserve(inst.results_len());
1357
1358        self.bindgen
1359            .emit(self.resolve, inst, &mut self.operands, &mut self.results);
1360
1361        assert_eq!(
1362            self.results.len(),
1363            inst.results_len(),
1364            "{:?} expected {} results, got {}",
1365            inst,
1366            inst.results_len(),
1367            self.results.len()
1368        );
1369        self.stack.append(&mut self.results);
1370    }
1371
1372    fn push_block(&mut self) {
1373        self.bindgen.push_block();
1374    }
1375
1376    fn finish_block(&mut self, size: usize) {
1377        self.operands.clear();
1378        assert!(
1379            size <= self.stack.len(),
1380            "not enough operands on stack for finishing block",
1381        );
1382        self.operands
1383            .extend(self.stack.drain((self.stack.len() - size)..));
1384        self.bindgen.finish_block(&mut self.operands);
1385    }
1386
1387    fn lower(&mut self, ty: &Type) {
1388        use Instruction::*;
1389
1390        match *ty {
1391            Type::Bool => self.emit(&I32FromBool),
1392            Type::S8 => self.emit(&I32FromS8),
1393            Type::U8 => self.emit(&I32FromU8),
1394            Type::S16 => self.emit(&I32FromS16),
1395            Type::U16 => self.emit(&I32FromU16),
1396            Type::S32 => self.emit(&I32FromS32),
1397            Type::U32 => self.emit(&I32FromU32),
1398            Type::S64 => self.emit(&I64FromS64),
1399            Type::U64 => self.emit(&I64FromU64),
1400            Type::Char => self.emit(&I32FromChar),
1401            Type::F32 => self.emit(&CoreF32FromF32),
1402            Type::F64 => self.emit(&CoreF64FromF64),
1403            Type::String => {
1404                let realloc = self.list_realloc();
1405                self.emit(&StringLower { realloc });
1406            }
1407            Type::ErrorContext => self.emit(&ErrorContextLower),
1408            Type::Id(id) => match &self.resolve.types[id].kind {
1409                TypeDefKind::Type(t) => self.lower(t),
1410                TypeDefKind::List(element) => {
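                    // When the element's guest representation already matches
                    // the canonical ABI layout (typically the case for
                    // `list<u8>` and similar primitive lists) the whole list
                    // can be lowered at once; otherwise (e.g. `list<string>`)
                    // each element is written into memory one at a time via
                    // the block below.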
1411                    let realloc = self.list_realloc();
1412                    if self.bindgen.is_list_canonical(self.resolve, element) {
1413                        self.emit(&ListCanonLower { element, realloc });
1414                    } else {
1415                        self.push_block();
1416                        self.emit(&IterElem { element });
1417                        self.emit(&IterBasePointer);
1418                        let addr = self.stack.pop().unwrap();
1419                        self.write_to_memory(element, addr, Default::default());
1420                        self.finish_block(0);
1421                        self.emit(&ListLower { element, realloc });
1422                    }
1423                }
1424                TypeDefKind::Handle(handle) => {
1425                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
1426                    self.emit(&HandleLower {
1427                        handle,
1428                        ty: id,
1429                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
1430                    });
1431                }
1432                TypeDefKind::Resource => {
1433                    todo!();
1434                }
1435                TypeDefKind::Record(record) => {
1436                    self.emit(&RecordLower {
1437                        record,
1438                        ty: id,
1439                        name: self.resolve.types[id].name.as_deref().unwrap(),
1440                    });
1441                    let values = self
1442                        .stack
1443                        .drain(self.stack.len() - record.fields.len()..)
1444                        .collect::<Vec<_>>();
1445                    for (field, value) in record.fields.iter().zip(values) {
1446                        self.stack.push(value);
1447                        self.lower(&field.ty);
1448                    }
1449                }
1450                TypeDefKind::Tuple(tuple) => {
1451                    self.emit(&TupleLower { tuple, ty: id });
1452                    let values = self
1453                        .stack
1454                        .drain(self.stack.len() - tuple.types.len()..)
1455                        .collect::<Vec<_>>();
1456                    for (ty, value) in tuple.types.iter().zip(values) {
1457                        self.stack.push(value);
1458                        self.lower(ty);
1459                    }
1460                }
1461
1462                TypeDefKind::Flags(flags) => {
1463                    self.emit(&FlagsLower {
1464                        flags,
1465                        ty: id,
1466                        name: self.resolve.types[id].name.as_ref().unwrap(),
1467                    });
1468                }
1469
1470                TypeDefKind::Variant(v) => {
1471                    let results =
1472                        self.lower_variant_arms(ty, v.cases.iter().map(|c| c.ty.as_ref()));
1473                    self.emit(&VariantLower {
1474                        variant: v,
1475                        ty: id,
1476                        results: &results,
1477                        name: self.resolve.types[id].name.as_deref().unwrap(),
1478                    });
1479                }
1480                TypeDefKind::Enum(enum_) => {
1481                    self.emit(&EnumLower {
1482                        enum_,
1483                        ty: id,
1484                        name: self.resolve.types[id].name.as_deref().unwrap(),
1485                    });
1486                }
1487                TypeDefKind::Option(t) => {
1488                    let results = self.lower_variant_arms(ty, [None, Some(t)]);
1489                    self.emit(&OptionLower {
1490                        payload: t,
1491                        ty: id,
1492                        results: &results,
1493                    });
1494                }
1495                TypeDefKind::Result(r) => {
1496                    let results = self.lower_variant_arms(ty, [r.ok.as_ref(), r.err.as_ref()]);
1497                    self.emit(&ResultLower {
1498                        result: r,
1499                        ty: id,
1500                        results: &results,
1501                    });
1502                }
1503                TypeDefKind::Future(ty) => {
1504                    self.emit(&FutureLower {
1505                        payload: ty,
1506                        ty: id,
1507                    });
1508                }
1509                TypeDefKind::Stream(ty) => {
1510                    self.emit(&StreamLower {
1511                        payload: ty,
1512                        ty: id,
1513                    });
1514                }
1515                TypeDefKind::Unknown => unreachable!(),
1516                TypeDefKind::FixedSizeList(..) => todo!(),
1517            },
1518        }
1519    }
1520
1521    fn lower_variant_arms<'b>(
1522        &mut self,
1523        ty: &Type,
1524        cases: impl IntoIterator<Item = Option<&'b Type>>,
1525    ) -> Vec<WasmType> {
1526        use Instruction::*;
1527        let results = flat_types(self.resolve, ty, None).unwrap();
1528        let mut casts = Vec::new();
1529        for (i, ty) in cases.into_iter().enumerate() {
1530            self.push_block();
1531            self.emit(&VariantPayloadName);
1532            let payload_name = self.stack.pop().unwrap();
1533            self.emit(&I32Const { val: i as i32 });
1534            let mut pushed = 1;
1535            if let Some(ty) = ty {
1536                // Using the payload of this block we lower the type to
1537                // raw wasm values.
1538                self.stack.push(payload_name);
1539                self.lower(ty);
1540
1541                // Determine the types of all the wasm values we just
1542                // pushed, and record how many. If we pushed too few
1543                // then we'll need to push some zeros after this.
1544                let temp = flat_types(self.resolve, ty, None).unwrap();
1545                pushed += temp.len();
1546
1547                // For all the types pushed we may need to insert some
1548                // bitcasts. This will go through and cast everything
1549                // to the right type to ensure all blocks produce the
1550                // same set of results.
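                // For example, for `variant { a(f32), b(u64) }` the joined
                // flat representation is `[I32, I64]`, so the `a` arm's
                // lowered `f32` payload is bitcast to `I64` to match the
                // shape produced by the `b` arm.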
1551                casts.truncate(0);
1552                for (actual, expected) in temp.iter().zip(&results[1..]) {
1553                    casts.push(cast(*actual, *expected));
1554                }
1555                if casts.iter().any(|c| *c != Bitcast::None) {
1556                    self.emit(&Bitcasts { casts: &casts });
1557                }
1558            }
1559
1560            // If we haven't pushed enough items in this block to match
1561            // what other variants are pushing then we need to push
1562            // some zeros.
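            // For example, the `none` arm of `option<u64>` pushes only the
            // discriminant, so a zero `I64` is appended to match the
            // `[I32, I64]` shape produced by the `some` arm.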
1563            if pushed < results.len() {
1564                self.emit(&ConstZero {
1565                    tys: &results[pushed..],
1566                });
1567            }
1568            self.finish_block(results.len());
1569        }
1570        results
1571    }
1572
1573    fn list_realloc(&self) -> Option<&'static str> {
1574        match self.realloc.expect("realloc should be configured") {
1575            Realloc::None => None,
1576            Realloc::Export(s) => Some(s),
1577        }
1578    }
1579
1580    /// Note that in general everything in this function is the opposite of the
1581    /// `lower` function above. This is intentional and should be kept this way!
1582    fn lift(&mut self, ty: &Type) {
1583        use Instruction::*;
1584
1585        match *ty {
1586            Type::Bool => self.emit(&BoolFromI32),
1587            Type::S8 => self.emit(&S8FromI32),
1588            Type::U8 => self.emit(&U8FromI32),
1589            Type::S16 => self.emit(&S16FromI32),
1590            Type::U16 => self.emit(&U16FromI32),
1591            Type::S32 => self.emit(&S32FromI32),
1592            Type::U32 => self.emit(&U32FromI32),
1593            Type::S64 => self.emit(&S64FromI64),
1594            Type::U64 => self.emit(&U64FromI64),
1595            Type::Char => self.emit(&CharFromI32),
1596            Type::F32 => self.emit(&F32FromCoreF32),
1597            Type::F64 => self.emit(&F64FromCoreF64),
1598            Type::String => self.emit(&StringLift),
1599            Type::ErrorContext => self.emit(&ErrorContextLift),
1600            Type::Id(id) => match &self.resolve.types[id].kind {
1601                TypeDefKind::Type(t) => self.lift(t),
1602                TypeDefKind::List(element) => {
1603                    if self.bindgen.is_list_canonical(self.resolve, element) {
1604                        self.emit(&ListCanonLift { element, ty: id });
1605                    } else {
1606                        self.push_block();
1607                        self.emit(&IterBasePointer);
1608                        let addr = self.stack.pop().unwrap();
1609                        self.read_from_memory(element, addr, Default::default());
1610                        self.finish_block(1);
1611                        self.emit(&ListLift { element, ty: id });
1612                    }
1613                }
1614                TypeDefKind::Handle(handle) => {
1615                    let (Handle::Own(ty) | Handle::Borrow(ty)) = handle;
1616                    self.emit(&HandleLift {
1617                        handle,
1618                        ty: id,
1619                        name: self.resolve.types[*ty].name.as_deref().unwrap(),
1620                    });
1621                }
1622                TypeDefKind::Resource => {
1623                    todo!();
1624                }
1625                TypeDefKind::Record(record) => {
1626                    self.flat_for_each_record_type(
1627                        ty,
1628                        record.fields.iter().map(|f| &f.ty),
1629                        Self::lift,
1630                    );
1631                    self.emit(&RecordLift {
1632                        record,
1633                        ty: id,
1634                        name: self.resolve.types[id].name.as_deref().unwrap(),
1635                    });
1636                }
1637                TypeDefKind::Tuple(tuple) => {
1638                    self.flat_for_each_record_type(ty, tuple.types.iter(), Self::lift);
1639                    self.emit(&TupleLift { tuple, ty: id });
1640                }
1641                TypeDefKind::Flags(flags) => {
1642                    self.emit(&FlagsLift {
1643                        flags,
1644                        ty: id,
1645                        name: self.resolve.types[id].name.as_ref().unwrap(),
1646                    });
1647                }
1648
1649                TypeDefKind::Variant(v) => {
1650                    self.flat_for_each_variant_arm(
1651                        ty,
1652                        true,
1653                        v.cases.iter().map(|c| c.ty.as_ref()),
1654                        Self::lift,
1655                    );
1656                    self.emit(&VariantLift {
1657                        variant: v,
1658                        ty: id,
1659                        name: self.resolve.types[id].name.as_deref().unwrap(),
1660                    });
1661                }
1662
1663                TypeDefKind::Enum(enum_) => {
1664                    self.emit(&EnumLift {
1665                        enum_,
1666                        ty: id,
1667                        name: self.resolve.types[id].name.as_deref().unwrap(),
1668                    });
1669                }
1670
1671                TypeDefKind::Option(t) => {
1672                    self.flat_for_each_variant_arm(ty, true, [None, Some(t)], Self::lift);
1673                    self.emit(&OptionLift { payload: t, ty: id });
1674                }
1675
1676                TypeDefKind::Result(r) => {
1677                    self.flat_for_each_variant_arm(
1678                        ty,
1679                        true,
1680                        [r.ok.as_ref(), r.err.as_ref()],
1681                        Self::lift,
1682                    );
1683                    self.emit(&ResultLift { result: r, ty: id });
1684                }
1685
1686                TypeDefKind::Future(ty) => {
1687                    self.emit(&FutureLift {
1688                        payload: ty,
1689                        ty: id,
1690                    });
1691                }
1692                TypeDefKind::Stream(ty) => {
1693                    self.emit(&StreamLift {
1694                        payload: ty,
1695                        ty: id,
1696                    });
1697                }
1698                TypeDefKind::Unknown => unreachable!(),
1699                TypeDefKind::FixedSizeList(..) => todo!(),
1700            },
1701        }
1702    }
1703
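    /// Pops the container's flat operands from the stack, splits them into
    /// per-field chunks, and pushes each chunk back before invoking `iter`
    /// for the corresponding field type.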
1704    fn flat_for_each_record_type<'b>(
1705        &mut self,
1706        container: &Type,
1707        types: impl Iterator<Item = &'b Type>,
1708        mut iter: impl FnMut(&mut Self, &Type),
1709    ) {
1710        let temp = flat_types(self.resolve, container, None).unwrap();
1711        let mut args = self
1712            .stack
1713            .drain(self.stack.len() - temp.len()..)
1714            .collect::<Vec<_>>();
1715        for ty in types {
1716            let temp = flat_types(self.resolve, ty, None).unwrap();
1717            self.stack.extend(args.drain(..temp.len()));
1718            iter(self, ty);
1719        }
1720    }
1721
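    /// Emits one block per variant case. Each block re-pushes the shared
    /// flat payload operands (bitcasting them to the case's own core types
    /// when needed) and then runs `iter` on the case's payload type, if any.
    /// `blocks_with_type_have_result` controls whether blocks for cases with
    /// a payload finish with one result on the stack.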
1722    fn flat_for_each_variant_arm<'b>(
1723        &mut self,
1724        ty: &Type,
1725        blocks_with_type_have_result: bool,
1726        cases: impl IntoIterator<Item = Option<&'b Type>>,
1727        mut iter: impl FnMut(&mut Self, &Type),
1728    ) {
1729        let params = flat_types(self.resolve, ty, None).unwrap();
1730        let mut casts = Vec::new();
1731        let block_inputs = self
1732            .stack
1733            .drain(self.stack.len() + 1 - params.len()..)
1734            .collect::<Vec<_>>();
1735        for ty in cases {
1736            self.push_block();
1737            if let Some(ty) = ty {
1738                // Push only the values we need for this variant onto
1739                // the stack.
1740                let temp = flat_types(self.resolve, ty, None).unwrap();
1741                self.stack
1742                    .extend(block_inputs[..temp.len()].iter().cloned());
1743
1744                // Cast all the types we have on the stack to the actual
1745                // types needed for this variant, if necessary.
1746                casts.truncate(0);
1747                for (actual, expected) in temp.iter().zip(&params[1..]) {
1748                    casts.push(cast(*expected, *actual));
1749                }
1750                if casts.iter().any(|c| *c != Bitcast::None) {
1751                    self.emit(&Instruction::Bitcasts { casts: &casts });
1752                }
1753
1754                // Then recursively lift this variant's payload.
1755                iter(self, ty);
1756            }
1757            self.finish_block(if blocks_with_type_have_result {
1758                ty.is_some() as usize
1759            } else {
1760                0
1761            });
1762        }
1763    }
1764
1765    fn write_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
1766        use Instruction::*;
1767
1768        match *ty {
1769            // Builtin types need different flavors of storage instructions
1770            // depending on the size of the value written.
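            // For example, a `u16` is lowered to an `i32` and then stored
            // with `I32Store16`, while a `u64` uses `I64Store`.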
1771            Type::Bool | Type::U8 | Type::S8 => {
1772                self.lower_and_emit(ty, addr, &I32Store8 { offset })
1773            }
1774            Type::U16 | Type::S16 => self.lower_and_emit(ty, addr, &I32Store16 { offset }),
1775            Type::U32 | Type::S32 | Type::Char => {
1776                self.lower_and_emit(ty, addr, &I32Store { offset })
1777            }
1778            Type::U64 | Type::S64 => self.lower_and_emit(ty, addr, &I64Store { offset }),
1779            Type::F32 => self.lower_and_emit(ty, addr, &F32Store { offset }),
1780            Type::F64 => self.lower_and_emit(ty, addr, &F64Store { offset }),
1781            Type::String => self.write_list_to_memory(ty, addr, offset),
1782            Type::ErrorContext => self.lower_and_emit(ty, addr, &I32Store { offset }),
1783
1784            Type::Id(id) => match &self.resolve.types[id].kind {
1785                TypeDefKind::Type(t) => self.write_to_memory(t, addr, offset),
1786                TypeDefKind::List(_) => self.write_list_to_memory(ty, addr, offset),
1787
1788                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
1789                    self.lower_and_emit(ty, addr, &I32Store { offset })
1790                }
1791
1792                // Decompose the record into its components and then write all
1793                // the components into memory one-by-one.
1794                TypeDefKind::Record(record) => {
1795                    self.emit(&RecordLower {
1796                        record,
1797                        ty: id,
1798                        name: self.resolve.types[id].name.as_deref().unwrap(),
1799                    });
1800                    self.write_fields_to_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
1801                }
1802                TypeDefKind::Resource => {
1803                    todo!()
1804                }
1805                TypeDefKind::Tuple(tuple) => {
1806                    self.emit(&TupleLower { tuple, ty: id });
1807                    self.write_fields_to_memory(tuple.types.iter(), addr, offset);
1808                }
1809
1810                TypeDefKind::Flags(f) => {
1811                    self.lower(ty);
1812                    match f.repr() {
1813                        FlagsRepr::U8 => {
1814                            self.stack.push(addr);
1815                            self.store_intrepr(offset, Int::U8);
1816                        }
1817                        FlagsRepr::U16 => {
1818                            self.stack.push(addr);
1819                            self.store_intrepr(offset, Int::U16);
1820                        }
1821                        FlagsRepr::U32(n) => {
1822                            for i in (0..n).rev() {
1823                                self.stack.push(addr.clone());
1824                                self.emit(&I32Store {
1825                                    offset: offset.add_bytes(i * 4),
1826                                });
1827                            }
1828                        }
1829                    }
1830                }
1831
1832                // Each case gets its own block, and the first step in each
1833                // case is writing the discriminant. After that, if the case
1834                // has a payload, we write the payload after the
1835                // discriminant, aligned up to the payload's alignment.
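                // For example, `result<u32, u8>` stores its one-byte
                // discriminant at offset 0 and its payload at offset 4, since
                // the payload area is aligned to the larger of the two
                // payload alignments.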
1836                TypeDefKind::Variant(v) => {
1837                    self.write_variant_arms_to_memory(
1838                        offset,
1839                        addr,
1840                        v.tag(),
1841                        v.cases.iter().map(|c| c.ty.as_ref()),
1842                    );
1843                    self.emit(&VariantLower {
1844                        variant: v,
1845                        ty: id,
1846                        results: &[],
1847                        name: self.resolve.types[id].name.as_deref().unwrap(),
1848                    });
1849                }
1850
1851                TypeDefKind::Option(t) => {
1852                    self.write_variant_arms_to_memory(offset, addr, Int::U8, [None, Some(t)]);
1853                    self.emit(&OptionLower {
1854                        payload: t,
1855                        ty: id,
1856                        results: &[],
1857                    });
1858                }
1859
1860                TypeDefKind::Result(r) => {
1861                    self.write_variant_arms_to_memory(
1862                        offset,
1863                        addr,
1864                        Int::U8,
1865                        [r.ok.as_ref(), r.err.as_ref()],
1866                    );
1867                    self.emit(&ResultLower {
1868                        result: r,
1869                        ty: id,
1870                        results: &[],
1871                    });
1872                }
1873
1874                TypeDefKind::Enum(e) => {
1875                    self.lower(ty);
1876                    self.stack.push(addr);
1877                    self.store_intrepr(offset, e.tag());
1878                }
1879
1880                TypeDefKind::Unknown => unreachable!(),
1881                TypeDefKind::FixedSizeList(..) => todo!(),
1882            },
1883        }
1884    }
1885
1886    fn write_params_to_memory<'b>(
1887        &mut self,
1888        params: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
1889        addr: B::Operand,
1890        offset: ArchitectureSize,
1891    ) {
1892        self.write_fields_to_memory(params, addr, offset);
1893    }
1894
1895    fn write_variant_arms_to_memory<'b>(
1896        &mut self,
1897        offset: ArchitectureSize,
1898        addr: B::Operand,
1899        tag: Int,
1900        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
1901    ) {
1902        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
1903        for (i, ty) in cases.into_iter().enumerate() {
1904            self.push_block();
1905            self.emit(&Instruction::VariantPayloadName);
1906            let payload_name = self.stack.pop().unwrap();
1907            self.emit(&Instruction::I32Const { val: i as i32 });
1908            self.stack.push(addr.clone());
1909            self.store_intrepr(offset, tag);
1910            if let Some(ty) = ty {
1911                self.stack.push(payload_name.clone());
1912                self.write_to_memory(ty, addr.clone(), payload_offset);
1913            }
1914            self.finish_block(0);
1915        }
1916    }
1917
1918    fn write_list_to_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
1919        // After lowering the list there are two values on the stack: the
1920        // pointer and the length. Write the pointer into the low address
1921        // and the length into the high address.
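        // For example, on wasm32 the pointer lands at `offset + 0` and the
        // length at `offset + 4`; on wasm64 the length lands at `offset + 8`.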
1922        self.lower(ty);
1923        self.stack.push(addr.clone());
1924        self.emit(&Instruction::LengthStore {
1925            offset: offset + self.bindgen.sizes().align(ty).into(),
1926        });
1927        self.stack.push(addr);
1928        self.emit(&Instruction::PointerStore { offset });
1929    }
1930
1931    fn write_fields_to_memory<'b>(
1932        &mut self,
1933        tys: impl IntoIterator<Item = &'b Type, IntoIter: ExactSizeIterator>,
1934        addr: B::Operand,
1935        offset: ArchitectureSize,
1936    ) {
1937        let tys = tys.into_iter();
1938        let fields = self
1939            .stack
1940            .drain(self.stack.len() - tys.len()..)
1941            .collect::<Vec<_>>();
1942        for ((field_offset, ty), op) in self
1943            .bindgen
1944            .sizes()
1945            .field_offsets(tys)
1946            .into_iter()
1947            .zip(fields)
1948        {
1949            self.stack.push(op);
1950            self.write_to_memory(ty, addr.clone(), offset + (field_offset));
1951        }
1952    }
1953
1954    fn lower_and_emit(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
1955        self.lower(ty);
1956        self.stack.push(addr);
1957        self.emit(instr);
1958    }
1959
1960    fn read_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
1961        use Instruction::*;
1962
1963        match *ty {
1964            Type::Bool => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
1965            Type::U8 => self.emit_and_lift(ty, addr, &I32Load8U { offset }),
1966            Type::S8 => self.emit_and_lift(ty, addr, &I32Load8S { offset }),
1967            Type::U16 => self.emit_and_lift(ty, addr, &I32Load16U { offset }),
1968            Type::S16 => self.emit_and_lift(ty, addr, &I32Load16S { offset }),
1969            Type::U32 | Type::S32 | Type::Char => self.emit_and_lift(ty, addr, &I32Load { offset }),
1970            Type::U64 | Type::S64 => self.emit_and_lift(ty, addr, &I64Load { offset }),
1971            Type::F32 => self.emit_and_lift(ty, addr, &F32Load { offset }),
1972            Type::F64 => self.emit_and_lift(ty, addr, &F64Load { offset }),
1973            Type::String => self.read_list_from_memory(ty, addr, offset),
1974            Type::ErrorContext => self.emit_and_lift(ty, addr, &I32Load { offset }),
1975
1976            Type::Id(id) => match &self.resolve.types[id].kind {
1977                TypeDefKind::Type(t) => self.read_from_memory(t, addr, offset),
1978
1979                TypeDefKind::List(_) => self.read_list_from_memory(ty, addr, offset),
1980
1981                TypeDefKind::Future(_) | TypeDefKind::Stream(_) | TypeDefKind::Handle(_) => {
1982                    self.emit_and_lift(ty, addr, &I32Load { offset })
1983                }
1984
1985                TypeDefKind::Resource => {
1986                    todo!();
1987                }
1988
1989                // Read and lift each field individually, adjusting the offset
1990                // as we go along, then aggregate all the fields into the
1991                // record.
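                // For example, a record with fields `a: u8` and `b: u32`
                // reads `a` at `offset + 0` and `b` at `offset + 4`, the
                // padding coming from `b`'s 4-byte alignment.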
1992                TypeDefKind::Record(record) => {
1993                    self.read_fields_from_memory(record.fields.iter().map(|f| &f.ty), addr, offset);
1994                    self.emit(&RecordLift {
1995                        record,
1996                        ty: id,
1997                        name: self.resolve.types[id].name.as_deref().unwrap(),
1998                    });
1999                }
2000
2001                TypeDefKind::Tuple(tuple) => {
2002                    self.read_fields_from_memory(&tuple.types, addr, offset);
2003                    self.emit(&TupleLift { tuple, ty: id });
2004                }
2005
2006                TypeDefKind::Flags(f) => {
2007                    match f.repr() {
2008                        FlagsRepr::U8 => {
2009                            self.stack.push(addr);
2010                            self.load_intrepr(offset, Int::U8);
2011                        }
2012                        FlagsRepr::U16 => {
2013                            self.stack.push(addr);
2014                            self.load_intrepr(offset, Int::U16);
2015                        }
2016                        FlagsRepr::U32(n) => {
2017                            for i in 0..n {
2018                                self.stack.push(addr.clone());
2019                                self.emit(&I32Load {
2020                                    offset: offset.add_bytes(i * 4),
2021                                });
2022                            }
2023                        }
2024                    }
2025                    self.lift(ty);
2026                }
2027
2028                // Each case gets its own block, and we dispatch to the
2029                // right block based on the discriminant we load first. Each
2030                // individual block is simple: it just reads the payload, if
2031                // any, from the payload offset.
2032                TypeDefKind::Variant(variant) => {
2033                    self.read_variant_arms_from_memory(
2034                        offset,
2035                        addr,
2036                        variant.tag(),
2037                        variant.cases.iter().map(|c| c.ty.as_ref()),
2038                    );
2039                    self.emit(&VariantLift {
2040                        variant,
2041                        ty: id,
2042                        name: self.resolve.types[id].name.as_deref().unwrap(),
2043                    });
2044                }
2045
2046                TypeDefKind::Option(t) => {
2047                    self.read_variant_arms_from_memory(offset, addr, Int::U8, [None, Some(t)]);
2048                    self.emit(&OptionLift { payload: t, ty: id });
2049                }
2050
2051                TypeDefKind::Result(r) => {
2052                    self.read_variant_arms_from_memory(
2053                        offset,
2054                        addr,
2055                        Int::U8,
2056                        [r.ok.as_ref(), r.err.as_ref()],
2057                    );
2058                    self.emit(&ResultLift { result: r, ty: id });
2059                }
2060
2061                TypeDefKind::Enum(e) => {
2062                    self.stack.push(addr.clone());
2063                    self.load_intrepr(offset, e.tag());
2064                    self.lift(ty);
2065                }
2066
2067                TypeDefKind::Unknown => unreachable!(),
2068                TypeDefKind::FixedSizeList(..) => todo!(),
2069            },
2070        }
2071    }
2072
2073    fn read_results_from_memory(
2074        &mut self,
2075        result: &Option<Type>,
2076        addr: B::Operand,
2077        offset: ArchitectureSize,
2078    ) {
2079        self.read_fields_from_memory(result, addr, offset)
2080    }
2081
2082    fn read_variant_arms_from_memory<'b>(
2083        &mut self,
2084        offset: ArchitectureSize,
2085        addr: B::Operand,
2086        tag: Int,
2087        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
2088    ) {
2089        self.stack.push(addr.clone());
2090        self.load_intrepr(offset, tag);
2091        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
2092        for ty in cases {
2093            self.push_block();
2094            if let Some(ty) = ty {
2095                self.read_from_memory(ty, addr.clone(), payload_offset);
2096            }
2097            self.finish_block(ty.is_some() as usize);
2098        }
2099    }
2100
2101    fn read_list_from_memory(&mut self, ty: &Type, addr: B::Operand, offset: ArchitectureSize) {
2102        // Read the pointer/length and then perform the standard lifting
2103        // process.
2104        self.stack.push(addr.clone());
2105        self.emit(&Instruction::PointerLoad { offset });
2106        self.stack.push(addr);
2107        self.emit(&Instruction::LengthLoad {
2108            offset: offset + self.bindgen.sizes().align(ty).into(),
2109        });
2110        self.lift(ty);
2111    }
2112
2113    fn read_fields_from_memory<'b>(
2114        &mut self,
2115        tys: impl IntoIterator<Item = &'b Type>,
2116        addr: B::Operand,
2117        offset: ArchitectureSize,
2118    ) {
2119        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys).iter() {
2120            self.read_from_memory(ty, addr.clone(), offset + (*field_offset));
2121        }
2122    }
2123
2124    fn emit_and_lift(&mut self, ty: &Type, addr: B::Operand, instr: &Instruction) {
2125        self.stack.push(addr);
2126        self.emit(instr);
2127        self.lift(ty);
2128    }
2129
2130    fn load_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2131        self.emit(&match repr {
2132            Int::U64 => Instruction::I64Load { offset },
2133            Int::U32 => Instruction::I32Load { offset },
2134            Int::U16 => Instruction::I32Load16U { offset },
2135            Int::U8 => Instruction::I32Load8U { offset },
2136        });
2137    }
2138
2139    fn store_intrepr(&mut self, offset: ArchitectureSize, repr: Int) {
2140        self.emit(&match repr {
2141            Int::U64 => Instruction::I64Store { offset },
2142            Int::U32 => Instruction::I32Store { offset },
2143            Int::U16 => Instruction::I32Store16 { offset },
2144            Int::U8 => Instruction::I32Store8 { offset },
2145        });
2146    }
2147
2148    /// Runs the deallocation of `ty` for the operands currently on
2149    /// `self.stack`.
2150    ///
2151    /// This will pop the ABI items of `ty` from `self.stack`.
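    /// For example, deallocating a `list<string>` frees each element's
    /// string allocation and then the list's own backing allocation.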
2152    fn deallocate(&mut self, ty: &Type, what: Deallocate) {
2153        use Instruction::*;
2154
2155        match *ty {
2156            Type::String => {
2157                self.emit(&Instruction::GuestDeallocateString);
2158            }
2159
2160            Type::Bool
2161            | Type::U8
2162            | Type::S8
2163            | Type::U16
2164            | Type::S16
2165            | Type::U32
2166            | Type::S32
2167            | Type::Char
2168            | Type::U64
2169            | Type::S64
2170            | Type::F32
2171            | Type::F64
2172            | Type::ErrorContext => {
2173                // No deallocation necessary, just discard the operand on the
2174                // stack.
2175                self.stack.pop().unwrap();
2176            }
2177
2178            Type::Id(id) => match &self.resolve.types[id].kind {
2179                TypeDefKind::Type(t) => self.deallocate(t, what),
2180
2181                TypeDefKind::List(element) => {
2182                    self.push_block();
2183                    self.emit(&IterBasePointer);
2184                    let elemaddr = self.stack.pop().unwrap();
2185                    self.deallocate_indirect(element, elemaddr, Default::default(), what);
2186                    self.finish_block(0);
2187
2188                    self.emit(&Instruction::GuestDeallocateList { element });
2189                }
2190
2191                TypeDefKind::Handle(Handle::Own(_))
2192                | TypeDefKind::Future(_)
2193                | TypeDefKind::Stream(_)
2194                    if what.handles() =>
2195                {
2196                    self.lift(ty);
2197                    self.emit(&DropHandle { ty });
2198                }
2199
2200                TypeDefKind::Record(record) => {
2201                    self.flat_for_each_record_type(
2202                        ty,
2203                        record.fields.iter().map(|f| &f.ty),
2204                        |me, ty| me.deallocate(ty, what),
2205                    );
2206                }
2207
2208                TypeDefKind::Tuple(tuple) => {
2209                    self.flat_for_each_record_type(ty, tuple.types.iter(), |me, ty| {
2210                        me.deallocate(ty, what)
2211                    });
2212                }
2213
2214                TypeDefKind::Variant(variant) => {
2215                    self.flat_for_each_variant_arm(
2216                        ty,
2217                        false,
2218                        variant.cases.iter().map(|c| c.ty.as_ref()),
2219                        |me, ty| me.deallocate(ty, what),
2220                    );
2221                    self.emit(&GuestDeallocateVariant {
2222                        blocks: variant.cases.len(),
2223                    });
2224                }
2225
2226                TypeDefKind::Option(t) => {
2227                    self.flat_for_each_variant_arm(ty, false, [None, Some(t)], |me, ty| {
2228                        me.deallocate(ty, what)
2229                    });
2230                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2231                }
2232
2233                TypeDefKind::Result(e) => {
2234                    self.flat_for_each_variant_arm(
2235                        ty,
2236                        false,
2237                        [e.ok.as_ref(), e.err.as_ref()],
2238                        |me, ty| me.deallocate(ty, what),
2239                    );
2240                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2241                }
2242
2243                // Discard the operand on the stack; otherwise there is nothing to free.
2244                TypeDefKind::Flags(_)
2245                | TypeDefKind::Enum(_)
2246                | TypeDefKind::Future(_)
2247                | TypeDefKind::Stream(_)
2248                | TypeDefKind::Handle(Handle::Own(_))
2249                | TypeDefKind::Handle(Handle::Borrow(_)) => {
2250                    self.stack.pop().unwrap();
2251                }
2252
2253                TypeDefKind::Resource => unreachable!(),
2254                TypeDefKind::Unknown => unreachable!(),
2255
2256                TypeDefKind::FixedSizeList(..) => todo!(),
2257            },
2258        }
2259    }
2260
2261    fn deallocate_indirect(
2262        &mut self,
2263        ty: &Type,
2264        addr: B::Operand,
2265        offset: ArchitectureSize,
2266        what: Deallocate,
2267    ) {
2268        use Instruction::*;
2269
2270        // No need to execute any instructions if this type itself doesn't
2271        // require any form of post-return.
2272        if !needs_deallocate(self.resolve, ty, what) {
2273            return;
2274        }
2275
2276        match *ty {
2277            Type::String => {
2278                self.stack.push(addr.clone());
2279                self.emit(&Instruction::PointerLoad { offset });
2280                self.stack.push(addr);
2281                self.emit(&Instruction::LengthLoad {
2282                    offset: offset + self.bindgen.sizes().align(ty).into(),
2283                });
2284                self.deallocate(ty, what);
2285            }
2286
2287            Type::Bool
2288            | Type::U8
2289            | Type::S8
2290            | Type::U16
2291            | Type::S16
2292            | Type::U32
2293            | Type::S32
2294            | Type::Char
2295            | Type::U64
2296            | Type::S64
2297            | Type::F32
2298            | Type::F64
2299            | Type::ErrorContext => {}
2300
2301            Type::Id(id) => match &self.resolve.types[id].kind {
2302                TypeDefKind::Type(t) => self.deallocate_indirect(t, addr, offset, what),
2303
2304                TypeDefKind::List(_) => {
2305                    self.stack.push(addr.clone());
2306                    self.emit(&Instruction::PointerLoad { offset });
2307                    self.stack.push(addr);
2308                    self.emit(&Instruction::LengthLoad {
2309                        offset: offset + self.bindgen.sizes().align(ty).into(),
2310                    });
2311
2312                    self.deallocate(ty, what);
2313                }
2314
2315                TypeDefKind::Handle(Handle::Own(_))
2316                | TypeDefKind::Future(_)
2317                | TypeDefKind::Stream(_)
2318                    if what.handles() =>
2319                {
2320                    self.read_from_memory(ty, addr, offset);
2321                    self.emit(&DropHandle { ty });
2322                }
2323
2324                TypeDefKind::Handle(Handle::Own(_)) => unreachable!(),
2325                TypeDefKind::Handle(Handle::Borrow(_)) => unreachable!(),
2326                TypeDefKind::Resource => unreachable!(),
2327
2328                TypeDefKind::Record(record) => {
2329                    self.deallocate_indirect_fields(
2330                        &record.fields.iter().map(|f| f.ty).collect::<Vec<_>>(),
2331                        addr,
2332                        offset,
2333                        what,
2334                    );
2335                }
2336
2337                TypeDefKind::Tuple(tuple) => {
2338                    self.deallocate_indirect_fields(&tuple.types, addr, offset, what);
2339                }
2340
2341                TypeDefKind::Flags(_) => {}
2342
2343                TypeDefKind::Variant(variant) => {
2344                    self.deallocate_indirect_variant(
2345                        offset,
2346                        addr,
2347                        variant.tag(),
2348                        variant.cases.iter().map(|c| c.ty.as_ref()),
2349                        what,
2350                    );
2351                    self.emit(&GuestDeallocateVariant {
2352                        blocks: variant.cases.len(),
2353                    });
2354                }
2355
2356                TypeDefKind::Option(t) => {
2357                    self.deallocate_indirect_variant(offset, addr, Int::U8, [None, Some(t)], what);
2358                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2359                }
2360
2361                TypeDefKind::Result(e) => {
2362                    self.deallocate_indirect_variant(
2363                        offset,
2364                        addr,
2365                        Int::U8,
2366                        [e.ok.as_ref(), e.err.as_ref()],
2367                        what,
2368                    );
2369                    self.emit(&GuestDeallocateVariant { blocks: 2 });
2370                }
2371
2372                TypeDefKind::Enum(_) => {}
2373
2374                TypeDefKind::Future(_) => unreachable!(),
2375                TypeDefKind::Stream(_) => unreachable!(),
2376                TypeDefKind::Unknown => unreachable!(),
2377                TypeDefKind::FixedSizeList(..) => todo!(),
2378            },
2379        }
2380    }
2381
2382    fn deallocate_indirect_variant<'b>(
2383        &mut self,
2384        offset: ArchitectureSize,
2385        addr: B::Operand,
2386        tag: Int,
2387        cases: impl IntoIterator<Item = Option<&'b Type>> + Clone,
2388        what: Deallocate,
2389    ) {
2390        self.stack.push(addr.clone());
2391        self.load_intrepr(offset, tag);
2392        let payload_offset = offset + (self.bindgen.sizes().payload_offset(tag, cases.clone()));
2393        for ty in cases {
2394            self.push_block();
2395            if let Some(ty) = ty {
2396                self.deallocate_indirect(ty, addr.clone(), payload_offset, what);
2397            }
2398            self.finish_block(0);
2399        }
2400    }
2401
2402    fn deallocate_indirect_fields(
2403        &mut self,
2404        tys: &[Type],
2405        addr: B::Operand,
2406        offset: ArchitectureSize,
2407        what: Deallocate,
2408    ) {
2409        for (field_offset, ty) in self.bindgen.sizes().field_offsets(tys) {
2410            self.deallocate_indirect(ty, addr.clone(), offset + (field_offset), what);
2411        }
2412    }
2413}
2414
2415fn cast(from: WasmType, to: WasmType) -> Bitcast {
2416    use WasmType::*;
2417
2418    match (from, to) {
2419        (I32, I32)
2420        | (I64, I64)
2421        | (F32, F32)
2422        | (F64, F64)
2423        | (Pointer, Pointer)
2424        | (PointerOrI64, PointerOrI64)
2425        | (Length, Length) => Bitcast::None,
2426
2427        (I32, I64) => Bitcast::I32ToI64,
2428        (F32, I32) => Bitcast::F32ToI32,
2429        (F64, I64) => Bitcast::F64ToI64,
2430
2431        (I64, I32) => Bitcast::I64ToI32,
2432        (I32, F32) => Bitcast::I32ToF32,
2433        (I64, F64) => Bitcast::I64ToF64,
2434
2435        (F32, I64) => Bitcast::F32ToI64,
2436        (I64, F32) => Bitcast::I64ToF32,
2437
2438        (I64, PointerOrI64) => Bitcast::I64ToP64,
2439        (Pointer, PointerOrI64) => Bitcast::PToP64,
2440        (_, PointerOrI64) => {
2441            Bitcast::Sequence(Box::new([cast(from, I64), cast(I64, PointerOrI64)]))
2442        }
2443
2444        (PointerOrI64, I64) => Bitcast::P64ToI64,
2445        (PointerOrI64, Pointer) => Bitcast::P64ToP,
2446        (PointerOrI64, _) => Bitcast::Sequence(Box::new([cast(PointerOrI64, I64), cast(I64, to)])),
2447
2448        (I32, Pointer) => Bitcast::I32ToP,
2449        (Pointer, I32) => Bitcast::PToI32,
2450        (I32, Length) => Bitcast::I32ToL,
2451        (Length, I32) => Bitcast::LToI32,
2452        (I64, Length) => Bitcast::I64ToL,
2453        (Length, I64) => Bitcast::LToI64,
2454        (Pointer, Length) => Bitcast::PToL,
2455        (Length, Pointer) => Bitcast::LToP,
2456
2457        (F32, Pointer | Length) => Bitcast::Sequence(Box::new([cast(F32, I32), cast(I32, to)])),
2458        (Pointer | Length, F32) => Bitcast::Sequence(Box::new([cast(from, I32), cast(I32, F32)])),
2459
2460        (F32, F64)
2461        | (F64, F32)
2462        | (F64, I32)
2463        | (I32, F64)
2464        | (Pointer | Length, I64 | F64)
2465        | (I64 | F64, Pointer | Length) => {
2466            unreachable!("Don't know how to bitcast from {:?} to {:?}", from, to);
2467        }
2468    }
2469}
2470
2471/// Returns the flattened core wasm types for a given type.
2472///
2473/// It is sometimes necessary to restrict the maximum number of flat parameters
2474/// dynamically, for example during an async guest import call (flat params are limited to 4).
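/// For example, `u64` flattens to `[I64]` and a record of `u32` and `f32`
/// fields flattens to `[I32, F32]`; `None` is returned when the flattened
/// form would exceed the allowed number of core values.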
2475fn flat_types(resolve: &Resolve, ty: &Type, max_params: Option<usize>) -> Option<Vec<WasmType>> {
2476    let mut storage =
2477        iter::repeat_n(WasmType::I32, max_params.unwrap_or(MAX_FLAT_PARAMS)).collect::<Vec<_>>();
2478    let mut flat = FlatTypes::new(storage.as_mut_slice());
2479    if resolve.push_flat(ty, &mut flat) {
2480        Some(flat.to_vec())
2481    } else {
2482        None
2483    }
2484}