//! AVX2 feature emitters (`asmkit/x86/features/avx2.rs`).
//!
//! Per-instruction emitter traits plus inherent `Assembler` convenience
//! methods for the AVX2 instructions defined in this module.

use super::super::opcodes::*;

use crate::core::emitter::*;
use crate::core::operand::*;
use crate::x86::assembler::*;
use crate::x86::operands::*;
6
7/// A dummy operand that represents no register. Here just for simplicity.
8const NOREG: Operand = Operand::new();
9
10/// `VBROADCASTI128`.
11///
12/// Supported operand variants:
13///
14/// ```text
15/// +---+----------+
16/// | # | Operands |
17/// +---+----------+
18/// | 1 | Ymm, Mem |
19/// +---+----------+
20/// ```
21pub trait Vbroadcasti128Emitter<A, B> {
22    fn vbroadcasti128(&mut self, op0: A, op1: B);
23}
24
25impl<'a> Vbroadcasti128Emitter<Ymm, Mem> for Assembler<'a> {
26    fn vbroadcasti128(&mut self, op0: Ymm, op1: Mem) {
27        self.emit(
28            VBROADCASTI128RM,
29            op0.as_operand(),
30            op1.as_operand(),
31            &NOREG,
32            &NOREG,
33        );
34    }
35}
36
37/// `VEXTRACTI128`.
38///
39/// Supported operand variants:
40///
41/// ```text
42/// +---+---------------+
43/// | # | Operands      |
44/// +---+---------------+
45/// | 1 | Mem, Ymm, Imm |
46/// | 2 | Xmm, Ymm, Imm |
47/// +---+---------------+
48/// ```
49pub trait Vextracti128Emitter<A, B, C> {
50    fn vextracti128(&mut self, op0: A, op1: B, op2: C);
51}
52
53impl<'a> Vextracti128Emitter<Xmm, Ymm, Imm> for Assembler<'a> {
54    fn vextracti128(&mut self, op0: Xmm, op1: Ymm, op2: Imm) {
55        self.emit(
56            VEXTRACTI128RRI,
57            op0.as_operand(),
58            op1.as_operand(),
59            op2.as_operand(),
60            &NOREG,
61        );
62    }
63}
64
65impl<'a> Vextracti128Emitter<Mem, Ymm, Imm> for Assembler<'a> {
66    fn vextracti128(&mut self, op0: Mem, op1: Ymm, op2: Imm) {
67        self.emit(
68            VEXTRACTI128MRI,
69            op0.as_operand(),
70            op1.as_operand(),
71            op2.as_operand(),
72            &NOREG,
73        );
74    }
75}
76
77/// `VGATHERDPD`.
78///
79/// Supported operand variants:
80///
81/// ```text
82/// +---+---------------+
83/// | # | Operands      |
84/// +---+---------------+
85/// | 1 | Xmm, Mem, Xmm |
86/// | 2 | Ymm, Mem, Ymm |
87/// +---+---------------+
88/// ```
89pub trait VgatherdpdEmitter_3<A, B, C> {
90    fn vgatherdpd_3(&mut self, op0: A, op1: B, op2: C);
91}
92
93impl<'a> VgatherdpdEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
94    fn vgatherdpd_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
95        self.emit(
96            VGATHERDPD128RMR,
97            op0.as_operand(),
98            op1.as_operand(),
99            op2.as_operand(),
100            &NOREG,
101        );
102    }
103}
104
105impl<'a> VgatherdpdEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
106    fn vgatherdpd_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
107        self.emit(
108            VGATHERDPD256RMR,
109            op0.as_operand(),
110            op1.as_operand(),
111            op2.as_operand(),
112            &NOREG,
113        );
114    }
115}
116
117/// `VGATHERDPS`.
118///
119/// Supported operand variants:
120///
121/// ```text
122/// +---+---------------+
123/// | # | Operands      |
124/// +---+---------------+
125/// | 1 | Xmm, Mem, Xmm |
126/// | 2 | Ymm, Mem, Ymm |
127/// +---+---------------+
128/// ```
129pub trait VgatherdpsEmitter_3<A, B, C> {
130    fn vgatherdps_3(&mut self, op0: A, op1: B, op2: C);
131}
132
133impl<'a> VgatherdpsEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
134    fn vgatherdps_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
135        self.emit(
136            VGATHERDPS128RMR,
137            op0.as_operand(),
138            op1.as_operand(),
139            op2.as_operand(),
140            &NOREG,
141        );
142    }
143}
144
145impl<'a> VgatherdpsEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
146    fn vgatherdps_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
147        self.emit(
148            VGATHERDPS256RMR,
149            op0.as_operand(),
150            op1.as_operand(),
151            op2.as_operand(),
152            &NOREG,
153        );
154    }
155}
156
157/// `VGATHERQPD`.
158///
159/// Supported operand variants:
160///
161/// ```text
162/// +---+---------------+
163/// | # | Operands      |
164/// +---+---------------+
165/// | 1 | Xmm, Mem, Xmm |
166/// | 2 | Ymm, Mem, Ymm |
167/// +---+---------------+
168/// ```
169pub trait VgatherqpdEmitter_3<A, B, C> {
170    fn vgatherqpd_3(&mut self, op0: A, op1: B, op2: C);
171}
172
173impl<'a> VgatherqpdEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
174    fn vgatherqpd_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
175        self.emit(
176            VGATHERQPD128RMR,
177            op0.as_operand(),
178            op1.as_operand(),
179            op2.as_operand(),
180            &NOREG,
181        );
182    }
183}
184
185impl<'a> VgatherqpdEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
186    fn vgatherqpd_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
187        self.emit(
188            VGATHERQPD256RMR,
189            op0.as_operand(),
190            op1.as_operand(),
191            op2.as_operand(),
192            &NOREG,
193        );
194    }
195}
196
197/// `VGATHERQPS`.
198///
199/// Supported operand variants:
200///
201/// ```text
202/// +---+---------------+
203/// | # | Operands      |
204/// +---+---------------+
205/// | 1 | Xmm, Mem, Xmm |
206/// +---+---------------+
207/// ```
208pub trait VgatherqpsEmitter_3<A, B, C> {
209    fn vgatherqps_3(&mut self, op0: A, op1: B, op2: C);
210}
211
212impl<'a> VgatherqpsEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
213    fn vgatherqps_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
214        self.emit(
215            VGATHERQPS128RMR,
216            op0.as_operand(),
217            op1.as_operand(),
218            op2.as_operand(),
219            &NOREG,
220        );
221    }
222}
223
224/// `VINSERTI128`.
225///
226/// Supported operand variants:
227///
228/// ```text
229/// +---+--------------------+
230/// | # | Operands           |
231/// +---+--------------------+
232/// | 1 | Ymm, Ymm, Mem, Imm |
233/// | 2 | Ymm, Ymm, Xmm, Imm |
234/// +---+--------------------+
235/// ```
236pub trait Vinserti128Emitter<A, B, C, D> {
237    fn vinserti128(&mut self, op0: A, op1: B, op2: C, op3: D);
238}
239
240impl<'a> Vinserti128Emitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
241    fn vinserti128(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
242        self.emit(
243            VINSERTI128RRRI,
244            op0.as_operand(),
245            op1.as_operand(),
246            op2.as_operand(),
247            op3.as_operand(),
248        );
249    }
250}
251
252impl<'a> Vinserti128Emitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
253    fn vinserti128(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
254        self.emit(
255            VINSERTI128RRMI,
256            op0.as_operand(),
257            op1.as_operand(),
258            op2.as_operand(),
259            op3.as_operand(),
260        );
261    }
262}
263
264/// `VPBLENDD`.
265///
266/// Supported operand variants:
267///
268/// ```text
269/// +---+--------------------+
270/// | # | Operands           |
271/// +---+--------------------+
272/// | 1 | Xmm, Xmm, Mem, Imm |
273/// | 2 | Xmm, Xmm, Xmm, Imm |
274/// | 3 | Ymm, Ymm, Mem, Imm |
275/// | 4 | Ymm, Ymm, Ymm, Imm |
276/// +---+--------------------+
277/// ```
278pub trait VpblenddEmitter<A, B, C, D> {
279    fn vpblendd(&mut self, op0: A, op1: B, op2: C, op3: D);
280}
281
282impl<'a> VpblenddEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
283    fn vpblendd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
284        self.emit(
285            VPBLENDD128RRRI,
286            op0.as_operand(),
287            op1.as_operand(),
288            op2.as_operand(),
289            op3.as_operand(),
290        );
291    }
292}
293
294impl<'a> VpblenddEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
295    fn vpblendd(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
296        self.emit(
297            VPBLENDD128RRMI,
298            op0.as_operand(),
299            op1.as_operand(),
300            op2.as_operand(),
301            op3.as_operand(),
302        );
303    }
304}
305
306impl<'a> VpblenddEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
307    fn vpblendd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
308        self.emit(
309            VPBLENDD256RRRI,
310            op0.as_operand(),
311            op1.as_operand(),
312            op2.as_operand(),
313            op3.as_operand(),
314        );
315    }
316}
317
318impl<'a> VpblenddEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
319    fn vpblendd(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
320        self.emit(
321            VPBLENDD256RRMI,
322            op0.as_operand(),
323            op1.as_operand(),
324            op2.as_operand(),
325            op3.as_operand(),
326        );
327    }
328}
329
330/// `VPERM2I128`.
331///
332/// Supported operand variants:
333///
334/// ```text
335/// +---+--------------------+
336/// | # | Operands           |
337/// +---+--------------------+
338/// | 1 | Ymm, Ymm, Mem, Imm |
339/// | 2 | Ymm, Ymm, Ymm, Imm |
340/// +---+--------------------+
341/// ```
342pub trait Vperm2i128Emitter<A, B, C, D> {
343    fn vperm2i128(&mut self, op0: A, op1: B, op2: C, op3: D);
344}
345
346impl<'a> Vperm2i128Emitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
347    fn vperm2i128(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
348        self.emit(
349            VPERM2I128_256RRRI,
350            op0.as_operand(),
351            op1.as_operand(),
352            op2.as_operand(),
353            op3.as_operand(),
354        );
355    }
356}
357
358impl<'a> Vperm2i128Emitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
359    fn vperm2i128(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
360        self.emit(
361            VPERM2I128_256RRMI,
362            op0.as_operand(),
363            op1.as_operand(),
364            op2.as_operand(),
365            op3.as_operand(),
366        );
367    }
368}
369
370/// `VPGATHERDD`.
371///
372/// Supported operand variants:
373///
374/// ```text
375/// +---+---------------+
376/// | # | Operands      |
377/// +---+---------------+
378/// | 1 | Xmm, Mem, Xmm |
379/// | 2 | Ymm, Mem, Ymm |
380/// +---+---------------+
381/// ```
382pub trait VpgatherddEmitter_3<A, B, C> {
383    fn vpgatherdd_3(&mut self, op0: A, op1: B, op2: C);
384}
385
386impl<'a> VpgatherddEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
387    fn vpgatherdd_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
388        self.emit(
389            VPGATHERDD128RMR,
390            op0.as_operand(),
391            op1.as_operand(),
392            op2.as_operand(),
393            &NOREG,
394        );
395    }
396}
397
398impl<'a> VpgatherddEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
399    fn vpgatherdd_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
400        self.emit(
401            VPGATHERDD256RMR,
402            op0.as_operand(),
403            op1.as_operand(),
404            op2.as_operand(),
405            &NOREG,
406        );
407    }
408}
409
410/// `VPGATHERDQ`.
411///
412/// Supported operand variants:
413///
414/// ```text
415/// +---+---------------+
416/// | # | Operands      |
417/// +---+---------------+
418/// | 1 | Xmm, Mem, Xmm |
419/// | 2 | Ymm, Mem, Ymm |
420/// +---+---------------+
421/// ```
422pub trait VpgatherdqEmitter_3<A, B, C> {
423    fn vpgatherdq_3(&mut self, op0: A, op1: B, op2: C);
424}
425
426impl<'a> VpgatherdqEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
427    fn vpgatherdq_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
428        self.emit(
429            VPGATHERDQ128RMR,
430            op0.as_operand(),
431            op1.as_operand(),
432            op2.as_operand(),
433            &NOREG,
434        );
435    }
436}
437
438impl<'a> VpgatherdqEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
439    fn vpgatherdq_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
440        self.emit(
441            VPGATHERDQ256RMR,
442            op0.as_operand(),
443            op1.as_operand(),
444            op2.as_operand(),
445            &NOREG,
446        );
447    }
448}
449
450/// `VPGATHERQD`.
451///
452/// Supported operand variants:
453///
454/// ```text
455/// +---+---------------+
456/// | # | Operands      |
457/// +---+---------------+
458/// | 1 | Xmm, Mem, Xmm |
459/// +---+---------------+
460/// ```
461pub trait VpgatherqdEmitter_3<A, B, C> {
462    fn vpgatherqd_3(&mut self, op0: A, op1: B, op2: C);
463}
464
465impl<'a> VpgatherqdEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
466    fn vpgatherqd_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
467        self.emit(
468            VPGATHERQD128RMR,
469            op0.as_operand(),
470            op1.as_operand(),
471            op2.as_operand(),
472            &NOREG,
473        );
474    }
475}
476
477/// `VPGATHERQQ`.
478///
479/// Supported operand variants:
480///
481/// ```text
482/// +---+---------------+
483/// | # | Operands      |
484/// +---+---------------+
485/// | 1 | Xmm, Mem, Xmm |
486/// | 2 | Ymm, Mem, Ymm |
487/// +---+---------------+
488/// ```
489pub trait VpgatherqqEmitter_3<A, B, C> {
490    fn vpgatherqq_3(&mut self, op0: A, op1: B, op2: C);
491}
492
493impl<'a> VpgatherqqEmitter_3<Xmm, Mem, Xmm> for Assembler<'a> {
494    fn vpgatherqq_3(&mut self, op0: Xmm, op1: Mem, op2: Xmm) {
495        self.emit(
496            VPGATHERQQ128RMR,
497            op0.as_operand(),
498            op1.as_operand(),
499            op2.as_operand(),
500            &NOREG,
501        );
502    }
503}
504
505impl<'a> VpgatherqqEmitter_3<Ymm, Mem, Ymm> for Assembler<'a> {
506    fn vpgatherqq_3(&mut self, op0: Ymm, op1: Mem, op2: Ymm) {
507        self.emit(
508            VPGATHERQQ256RMR,
509            op0.as_operand(),
510            op1.as_operand(),
511            op2.as_operand(),
512            &NOREG,
513        );
514    }
515}
516
517/// `VPMASKMOVD`.
518///
519/// Supported operand variants:
520///
521/// ```text
522/// +---+---------------+
523/// | # | Operands      |
524/// +---+---------------+
525/// | 1 | Mem, Xmm, Xmm |
526/// | 2 | Mem, Ymm, Ymm |
527/// | 3 | Xmm, Xmm, Mem |
528/// | 4 | Ymm, Ymm, Mem |
529/// +---+---------------+
530/// ```
531pub trait VpmaskmovdEmitter<A, B, C> {
532    fn vpmaskmovd(&mut self, op0: A, op1: B, op2: C);
533}
534
535impl<'a> VpmaskmovdEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
536    fn vpmaskmovd(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
537        self.emit(
538            VPMASKMOVD128RRM,
539            op0.as_operand(),
540            op1.as_operand(),
541            op2.as_operand(),
542            &NOREG,
543        );
544    }
545}
546
547impl<'a> VpmaskmovdEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
548    fn vpmaskmovd(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
549        self.emit(
550            VPMASKMOVD256RRM,
551            op0.as_operand(),
552            op1.as_operand(),
553            op2.as_operand(),
554            &NOREG,
555        );
556    }
557}
558
559impl<'a> VpmaskmovdEmitter<Mem, Xmm, Xmm> for Assembler<'a> {
560    fn vpmaskmovd(&mut self, op0: Mem, op1: Xmm, op2: Xmm) {
561        self.emit(
562            VPMASKMOVD128MRR,
563            op0.as_operand(),
564            op1.as_operand(),
565            op2.as_operand(),
566            &NOREG,
567        );
568    }
569}
570
571impl<'a> VpmaskmovdEmitter<Mem, Ymm, Ymm> for Assembler<'a> {
572    fn vpmaskmovd(&mut self, op0: Mem, op1: Ymm, op2: Ymm) {
573        self.emit(
574            VPMASKMOVD256MRR,
575            op0.as_operand(),
576            op1.as_operand(),
577            op2.as_operand(),
578            &NOREG,
579        );
580    }
581}
582
583/// `VPMASKMOVQ`.
584///
585/// Supported operand variants:
586///
587/// ```text
588/// +---+---------------+
589/// | # | Operands      |
590/// +---+---------------+
591/// | 1 | Mem, Xmm, Xmm |
592/// | 2 | Mem, Ymm, Ymm |
593/// | 3 | Xmm, Xmm, Mem |
594/// | 4 | Ymm, Ymm, Mem |
595/// +---+---------------+
596/// ```
597pub trait VpmaskmovqEmitter<A, B, C> {
598    fn vpmaskmovq(&mut self, op0: A, op1: B, op2: C);
599}
600
601impl<'a> VpmaskmovqEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
602    fn vpmaskmovq(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
603        self.emit(
604            VPMASKMOVQ128RRM,
605            op0.as_operand(),
606            op1.as_operand(),
607            op2.as_operand(),
608            &NOREG,
609        );
610    }
611}
612
613impl<'a> VpmaskmovqEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
614    fn vpmaskmovq(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
615        self.emit(
616            VPMASKMOVQ256RRM,
617            op0.as_operand(),
618            op1.as_operand(),
619            op2.as_operand(),
620            &NOREG,
621        );
622    }
623}
624
625impl<'a> VpmaskmovqEmitter<Mem, Xmm, Xmm> for Assembler<'a> {
626    fn vpmaskmovq(&mut self, op0: Mem, op1: Xmm, op2: Xmm) {
627        self.emit(
628            VPMASKMOVQ128MRR,
629            op0.as_operand(),
630            op1.as_operand(),
631            op2.as_operand(),
632            &NOREG,
633        );
634    }
635}
636
637impl<'a> VpmaskmovqEmitter<Mem, Ymm, Ymm> for Assembler<'a> {
638    fn vpmaskmovq(&mut self, op0: Mem, op1: Ymm, op2: Ymm) {
639        self.emit(
640            VPMASKMOVQ256MRR,
641            op0.as_operand(),
642            op1.as_operand(),
643            op2.as_operand(),
644            &NOREG,
645        );
646    }
647}
648
649impl<'a> Assembler<'a> {
650    /// `VBROADCASTI128`.
651    ///
652    /// Supported operand variants:
653    ///
654    /// ```text
655    /// +---+----------+
656    /// | # | Operands |
657    /// +---+----------+
658    /// | 1 | Ymm, Mem |
659    /// +---+----------+
660    /// ```
661    #[inline]
662    pub fn vbroadcasti128<A, B>(&mut self, op0: A, op1: B)
663    where
664        Assembler<'a>: Vbroadcasti128Emitter<A, B>,
665    {
666        <Self as Vbroadcasti128Emitter<A, B>>::vbroadcasti128(self, op0, op1);
667    }
668    /// `VEXTRACTI128`.
669    ///
670    /// Supported operand variants:
671    ///
672    /// ```text
673    /// +---+---------------+
674    /// | # | Operands      |
675    /// +---+---------------+
676    /// | 1 | Mem, Ymm, Imm |
677    /// | 2 | Xmm, Ymm, Imm |
678    /// +---+---------------+
679    /// ```
680    #[inline]
681    pub fn vextracti128<A, B, C>(&mut self, op0: A, op1: B, op2: C)
682    where
683        Assembler<'a>: Vextracti128Emitter<A, B, C>,
684    {
685        <Self as Vextracti128Emitter<A, B, C>>::vextracti128(self, op0, op1, op2);
686    }
687    /// `VGATHERDPD`.
688    ///
689    /// Supported operand variants:
690    ///
691    /// ```text
692    /// +---+---------------+
693    /// | # | Operands      |
694    /// +---+---------------+
695    /// | 1 | Xmm, Mem, Xmm |
696    /// | 2 | Ymm, Mem, Ymm |
697    /// +---+---------------+
698    /// ```
699    #[inline]
700    pub fn vgatherdpd_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
701    where
702        Assembler<'a>: VgatherdpdEmitter_3<A, B, C>,
703    {
704        <Self as VgatherdpdEmitter_3<A, B, C>>::vgatherdpd_3(self, op0, op1, op2);
705    }
706    /// `VGATHERDPS`.
707    ///
708    /// Supported operand variants:
709    ///
710    /// ```text
711    /// +---+---------------+
712    /// | # | Operands      |
713    /// +---+---------------+
714    /// | 1 | Xmm, Mem, Xmm |
715    /// | 2 | Ymm, Mem, Ymm |
716    /// +---+---------------+
717    /// ```
718    #[inline]
719    pub fn vgatherdps_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
720    where
721        Assembler<'a>: VgatherdpsEmitter_3<A, B, C>,
722    {
723        <Self as VgatherdpsEmitter_3<A, B, C>>::vgatherdps_3(self, op0, op1, op2);
724    }
725    /// `VGATHERQPD`.
726    ///
727    /// Supported operand variants:
728    ///
729    /// ```text
730    /// +---+---------------+
731    /// | # | Operands      |
732    /// +---+---------------+
733    /// | 1 | Xmm, Mem, Xmm |
734    /// | 2 | Ymm, Mem, Ymm |
735    /// +---+---------------+
736    /// ```
737    #[inline]
738    pub fn vgatherqpd_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
739    where
740        Assembler<'a>: VgatherqpdEmitter_3<A, B, C>,
741    {
742        <Self as VgatherqpdEmitter_3<A, B, C>>::vgatherqpd_3(self, op0, op1, op2);
743    }
744    /// `VGATHERQPS`.
745    ///
746    /// Supported operand variants:
747    ///
748    /// ```text
749    /// +---+---------------+
750    /// | # | Operands      |
751    /// +---+---------------+
752    /// | 1 | Xmm, Mem, Xmm |
753    /// +---+---------------+
754    /// ```
755    #[inline]
756    pub fn vgatherqps_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
757    where
758        Assembler<'a>: VgatherqpsEmitter_3<A, B, C>,
759    {
760        <Self as VgatherqpsEmitter_3<A, B, C>>::vgatherqps_3(self, op0, op1, op2);
761    }
762    /// `VINSERTI128`.
763    ///
764    /// Supported operand variants:
765    ///
766    /// ```text
767    /// +---+--------------------+
768    /// | # | Operands           |
769    /// +---+--------------------+
770    /// | 1 | Ymm, Ymm, Mem, Imm |
771    /// | 2 | Ymm, Ymm, Xmm, Imm |
772    /// +---+--------------------+
773    /// ```
774    #[inline]
775    pub fn vinserti128<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
776    where
777        Assembler<'a>: Vinserti128Emitter<A, B, C, D>,
778    {
779        <Self as Vinserti128Emitter<A, B, C, D>>::vinserti128(self, op0, op1, op2, op3);
780    }
781    /// `VPBLENDD`.
782    ///
783    /// Supported operand variants:
784    ///
785    /// ```text
786    /// +---+--------------------+
787    /// | # | Operands           |
788    /// +---+--------------------+
789    /// | 1 | Xmm, Xmm, Mem, Imm |
790    /// | 2 | Xmm, Xmm, Xmm, Imm |
791    /// | 3 | Ymm, Ymm, Mem, Imm |
792    /// | 4 | Ymm, Ymm, Ymm, Imm |
793    /// +---+--------------------+
794    /// ```
795    #[inline]
796    pub fn vpblendd<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
797    where
798        Assembler<'a>: VpblenddEmitter<A, B, C, D>,
799    {
800        <Self as VpblenddEmitter<A, B, C, D>>::vpblendd(self, op0, op1, op2, op3);
801    }
802    /// `VPERM2I128`.
803    ///
804    /// Supported operand variants:
805    ///
806    /// ```text
807    /// +---+--------------------+
808    /// | # | Operands           |
809    /// +---+--------------------+
810    /// | 1 | Ymm, Ymm, Mem, Imm |
811    /// | 2 | Ymm, Ymm, Ymm, Imm |
812    /// +---+--------------------+
813    /// ```
814    #[inline]
815    pub fn vperm2i128<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
816    where
817        Assembler<'a>: Vperm2i128Emitter<A, B, C, D>,
818    {
819        <Self as Vperm2i128Emitter<A, B, C, D>>::vperm2i128(self, op0, op1, op2, op3);
820    }
821    /// `VPGATHERDD`.
822    ///
823    /// Supported operand variants:
824    ///
825    /// ```text
826    /// +---+---------------+
827    /// | # | Operands      |
828    /// +---+---------------+
829    /// | 1 | Xmm, Mem, Xmm |
830    /// | 2 | Ymm, Mem, Ymm |
831    /// +---+---------------+
832    /// ```
833    #[inline]
834    pub fn vpgatherdd_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
835    where
836        Assembler<'a>: VpgatherddEmitter_3<A, B, C>,
837    {
838        <Self as VpgatherddEmitter_3<A, B, C>>::vpgatherdd_3(self, op0, op1, op2);
839    }
840    /// `VPGATHERDQ`.
841    ///
842    /// Supported operand variants:
843    ///
844    /// ```text
845    /// +---+---------------+
846    /// | # | Operands      |
847    /// +---+---------------+
848    /// | 1 | Xmm, Mem, Xmm |
849    /// | 2 | Ymm, Mem, Ymm |
850    /// +---+---------------+
851    /// ```
852    #[inline]
853    pub fn vpgatherdq_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
854    where
855        Assembler<'a>: VpgatherdqEmitter_3<A, B, C>,
856    {
857        <Self as VpgatherdqEmitter_3<A, B, C>>::vpgatherdq_3(self, op0, op1, op2);
858    }
859    /// `VPGATHERQD`.
860    ///
861    /// Supported operand variants:
862    ///
863    /// ```text
864    /// +---+---------------+
865    /// | # | Operands      |
866    /// +---+---------------+
867    /// | 1 | Xmm, Mem, Xmm |
868    /// +---+---------------+
869    /// ```
870    #[inline]
871    pub fn vpgatherqd_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
872    where
873        Assembler<'a>: VpgatherqdEmitter_3<A, B, C>,
874    {
875        <Self as VpgatherqdEmitter_3<A, B, C>>::vpgatherqd_3(self, op0, op1, op2);
876    }
877    /// `VPGATHERQQ`.
878    ///
879    /// Supported operand variants:
880    ///
881    /// ```text
882    /// +---+---------------+
883    /// | # | Operands      |
884    /// +---+---------------+
885    /// | 1 | Xmm, Mem, Xmm |
886    /// | 2 | Ymm, Mem, Ymm |
887    /// +---+---------------+
888    /// ```
889    #[inline]
890    pub fn vpgatherqq_3<A, B, C>(&mut self, op0: A, op1: B, op2: C)
891    where
892        Assembler<'a>: VpgatherqqEmitter_3<A, B, C>,
893    {
894        <Self as VpgatherqqEmitter_3<A, B, C>>::vpgatherqq_3(self, op0, op1, op2);
895    }
896    /// `VPMASKMOVD`.
897    ///
898    /// Supported operand variants:
899    ///
900    /// ```text
901    /// +---+---------------+
902    /// | # | Operands      |
903    /// +---+---------------+
904    /// | 1 | Mem, Xmm, Xmm |
905    /// | 2 | Mem, Ymm, Ymm |
906    /// | 3 | Xmm, Xmm, Mem |
907    /// | 4 | Ymm, Ymm, Mem |
908    /// +---+---------------+
909    /// ```
910    #[inline]
911    pub fn vpmaskmovd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
912    where
913        Assembler<'a>: VpmaskmovdEmitter<A, B, C>,
914    {
915        <Self as VpmaskmovdEmitter<A, B, C>>::vpmaskmovd(self, op0, op1, op2);
916    }
917    /// `VPMASKMOVQ`.
918    ///
919    /// Supported operand variants:
920    ///
921    /// ```text
922    /// +---+---------------+
923    /// | # | Operands      |
924    /// +---+---------------+
925    /// | 1 | Mem, Xmm, Xmm |
926    /// | 2 | Mem, Ymm, Ymm |
927    /// | 3 | Xmm, Xmm, Mem |
928    /// | 4 | Ymm, Ymm, Mem |
929    /// +---+---------------+
930    /// ```
931    #[inline]
932    pub fn vpmaskmovq<A, B, C>(&mut self, op0: A, op1: B, op2: C)
933    where
934        Assembler<'a>: VpmaskmovqEmitter<A, B, C>,
935    {
936        <Self as VpmaskmovqEmitter<A, B, C>>::vpmaskmovq(self, op0, op1, op2);
937    }
938}