Skip to main content

asmkit/x86/features/
AVX512DQ.rs

1use super::super::opcodes::*;
2use crate::core::emitter::*;
3use crate::core::operand::*;
4use crate::x86::assembler::*;
5use crate::x86::operands::*;
6
/// A dummy operand that represents no register. Here just for simplicity.
///
/// `Assembler::emit` takes a fixed number of operand slots (see the call
/// sites below, which always pass four operands after the opcode); positions
/// an instruction does not use are filled with `&NOREG`.
const NOREG: Operand = Operand::new();
9
10/// `KADDB`.
11///
12/// Supported operand variants:
13///
14/// ```text
15/// +---+------------------+
16/// | # | Operands         |
17/// +---+------------------+
18/// | 1 | KReg, KReg, KReg |
19/// +---+------------------+
20/// ```
21pub trait KaddbEmitter<A, B, C> {
22    fn kaddb(&mut self, op0: A, op1: B, op2: C);
23}
24
25impl<'a> KaddbEmitter<KReg, KReg, KReg> for Assembler<'a> {
26    fn kaddb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
27        self.emit(
28            KADDBKKK,
29            op0.as_operand(),
30            op1.as_operand(),
31            op2.as_operand(),
32            &NOREG,
33        );
34    }
35}
36
37/// `KADDW`.
38///
39/// Supported operand variants:
40///
41/// ```text
42/// +---+------------------+
43/// | # | Operands         |
44/// +---+------------------+
45/// | 1 | KReg, KReg, KReg |
46/// +---+------------------+
47/// ```
48pub trait KaddwEmitter<A, B, C> {
49    fn kaddw(&mut self, op0: A, op1: B, op2: C);
50}
51
52impl<'a> KaddwEmitter<KReg, KReg, KReg> for Assembler<'a> {
53    fn kaddw(&mut self, op0: KReg, op1: KReg, op2: KReg) {
54        self.emit(
55            KADDWKKK,
56            op0.as_operand(),
57            op1.as_operand(),
58            op2.as_operand(),
59            &NOREG,
60        );
61    }
62}
63
64/// `KANDB`.
65///
66/// Supported operand variants:
67///
68/// ```text
69/// +---+------------------+
70/// | # | Operands         |
71/// +---+------------------+
72/// | 1 | KReg, KReg, KReg |
73/// +---+------------------+
74/// ```
75pub trait KandbEmitter<A, B, C> {
76    fn kandb(&mut self, op0: A, op1: B, op2: C);
77}
78
79impl<'a> KandbEmitter<KReg, KReg, KReg> for Assembler<'a> {
80    fn kandb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
81        self.emit(
82            KANDBKKK,
83            op0.as_operand(),
84            op1.as_operand(),
85            op2.as_operand(),
86            &NOREG,
87        );
88    }
89}
90
91/// `KANDNB`.
92///
93/// Supported operand variants:
94///
95/// ```text
96/// +---+------------------+
97/// | # | Operands         |
98/// +---+------------------+
99/// | 1 | KReg, KReg, KReg |
100/// +---+------------------+
101/// ```
102pub trait KandnbEmitter<A, B, C> {
103    fn kandnb(&mut self, op0: A, op1: B, op2: C);
104}
105
106impl<'a> KandnbEmitter<KReg, KReg, KReg> for Assembler<'a> {
107    fn kandnb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
108        self.emit(
109            KANDNBKKK,
110            op0.as_operand(),
111            op1.as_operand(),
112            op2.as_operand(),
113            &NOREG,
114        );
115    }
116}
117
118/// `KMOVB`.
119///
120/// Supported operand variants:
121///
122/// ```text
123/// +---+------------+
124/// | # | Operands   |
125/// +---+------------+
126/// | 1 | Gpd, KReg  |
127/// | 2 | KReg, Gpd  |
128/// | 3 | KReg, KReg |
129/// | 4 | KReg, Mem  |
130/// | 5 | Mem, KReg  |
131/// +---+------------+
132/// ```
133pub trait KmovbEmitter<A, B> {
134    fn kmovb(&mut self, op0: A, op1: B);
135}
136
137impl<'a> KmovbEmitter<KReg, KReg> for Assembler<'a> {
138    fn kmovb(&mut self, op0: KReg, op1: KReg) {
139        self.emit(KMOVBKK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
140    }
141}
142
143impl<'a> KmovbEmitter<KReg, Mem> for Assembler<'a> {
144    fn kmovb(&mut self, op0: KReg, op1: Mem) {
145        self.emit(KMOVBKM, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
146    }
147}
148
149impl<'a> KmovbEmitter<Mem, KReg> for Assembler<'a> {
150    fn kmovb(&mut self, op0: Mem, op1: KReg) {
151        self.emit(KMOVBMK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
152    }
153}
154
155impl<'a> KmovbEmitter<KReg, Gpd> for Assembler<'a> {
156    fn kmovb(&mut self, op0: KReg, op1: Gpd) {
157        self.emit(KMOVBKR, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
158    }
159}
160
161impl<'a> KmovbEmitter<Gpd, KReg> for Assembler<'a> {
162    fn kmovb(&mut self, op0: Gpd, op1: KReg) {
163        self.emit(KMOVBRK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
164    }
165}
166
167/// `KNOTB`.
168///
169/// Supported operand variants:
170///
171/// ```text
172/// +---+------------+
173/// | # | Operands   |
174/// +---+------------+
175/// | 1 | KReg, KReg |
176/// +---+------------+
177/// ```
178pub trait KnotbEmitter<A, B> {
179    fn knotb(&mut self, op0: A, op1: B);
180}
181
182impl<'a> KnotbEmitter<KReg, KReg> for Assembler<'a> {
183    fn knotb(&mut self, op0: KReg, op1: KReg) {
184        self.emit(KNOTBKK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
185    }
186}
187
188/// `KORB`.
189///
190/// Supported operand variants:
191///
192/// ```text
193/// +---+------------------+
194/// | # | Operands         |
195/// +---+------------------+
196/// | 1 | KReg, KReg, KReg |
197/// +---+------------------+
198/// ```
199pub trait KorbEmitter<A, B, C> {
200    fn korb(&mut self, op0: A, op1: B, op2: C);
201}
202
203impl<'a> KorbEmitter<KReg, KReg, KReg> for Assembler<'a> {
204    fn korb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
205        self.emit(
206            KORBKKK,
207            op0.as_operand(),
208            op1.as_operand(),
209            op2.as_operand(),
210            &NOREG,
211        );
212    }
213}
214
215/// `KORTESTB`.
216///
217/// Supported operand variants:
218///
219/// ```text
220/// +---+------------+
221/// | # | Operands   |
222/// +---+------------+
223/// | 1 | KReg, KReg |
224/// +---+------------+
225/// ```
226pub trait KortestbEmitter<A, B> {
227    fn kortestb(&mut self, op0: A, op1: B);
228}
229
230impl<'a> KortestbEmitter<KReg, KReg> for Assembler<'a> {
231    fn kortestb(&mut self, op0: KReg, op1: KReg) {
232        self.emit(
233            KORTESTBKK,
234            op0.as_operand(),
235            op1.as_operand(),
236            &NOREG,
237            &NOREG,
238        );
239    }
240}
241
242/// `KSHIFTLB`.
243///
244/// Supported operand variants:
245///
246/// ```text
247/// +---+-----------------+
248/// | # | Operands        |
249/// +---+-----------------+
250/// | 1 | KReg, KReg, Imm |
251/// +---+-----------------+
252/// ```
253pub trait KshiftlbEmitter<A, B, C> {
254    fn kshiftlb(&mut self, op0: A, op1: B, op2: C);
255}
256
257impl<'a> KshiftlbEmitter<KReg, KReg, Imm> for Assembler<'a> {
258    fn kshiftlb(&mut self, op0: KReg, op1: KReg, op2: Imm) {
259        self.emit(
260            KSHIFTLBKKI,
261            op0.as_operand(),
262            op1.as_operand(),
263            op2.as_operand(),
264            &NOREG,
265        );
266    }
267}
268
269/// `KSHIFTRB`.
270///
271/// Supported operand variants:
272///
273/// ```text
274/// +---+-----------------+
275/// | # | Operands        |
276/// +---+-----------------+
277/// | 1 | KReg, KReg, Imm |
278/// +---+-----------------+
279/// ```
280pub trait KshiftrbEmitter<A, B, C> {
281    fn kshiftrb(&mut self, op0: A, op1: B, op2: C);
282}
283
284impl<'a> KshiftrbEmitter<KReg, KReg, Imm> for Assembler<'a> {
285    fn kshiftrb(&mut self, op0: KReg, op1: KReg, op2: Imm) {
286        self.emit(
287            KSHIFTRBKKI,
288            op0.as_operand(),
289            op1.as_operand(),
290            op2.as_operand(),
291            &NOREG,
292        );
293    }
294}
295
296/// `KTESTB`.
297///
298/// Supported operand variants:
299///
300/// ```text
301/// +---+------------+
302/// | # | Operands   |
303/// +---+------------+
304/// | 1 | KReg, KReg |
305/// +---+------------+
306/// ```
307pub trait KtestbEmitter<A, B> {
308    fn ktestb(&mut self, op0: A, op1: B);
309}
310
311impl<'a> KtestbEmitter<KReg, KReg> for Assembler<'a> {
312    fn ktestb(&mut self, op0: KReg, op1: KReg) {
313        self.emit(KTESTBKK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
314    }
315}
316
317/// `KTESTW`.
318///
319/// Supported operand variants:
320///
321/// ```text
322/// +---+------------+
323/// | # | Operands   |
324/// +---+------------+
325/// | 1 | KReg, KReg |
326/// +---+------------+
327/// ```
328pub trait KtestwEmitter<A, B> {
329    fn ktestw(&mut self, op0: A, op1: B);
330}
331
332impl<'a> KtestwEmitter<KReg, KReg> for Assembler<'a> {
333    fn ktestw(&mut self, op0: KReg, op1: KReg) {
334        self.emit(KTESTWKK, op0.as_operand(), op1.as_operand(), &NOREG, &NOREG);
335    }
336}
337
338/// `KXNORB`.
339///
340/// Supported operand variants:
341///
342/// ```text
343/// +---+------------------+
344/// | # | Operands         |
345/// +---+------------------+
346/// | 1 | KReg, KReg, KReg |
347/// +---+------------------+
348/// ```
349pub trait KxnorbEmitter<A, B, C> {
350    fn kxnorb(&mut self, op0: A, op1: B, op2: C);
351}
352
353impl<'a> KxnorbEmitter<KReg, KReg, KReg> for Assembler<'a> {
354    fn kxnorb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
355        self.emit(
356            KXNORBKKK,
357            op0.as_operand(),
358            op1.as_operand(),
359            op2.as_operand(),
360            &NOREG,
361        );
362    }
363}
364
365/// `KXORB`.
366///
367/// Supported operand variants:
368///
369/// ```text
370/// +---+------------------+
371/// | # | Operands         |
372/// +---+------------------+
373/// | 1 | KReg, KReg, KReg |
374/// +---+------------------+
375/// ```
376pub trait KxorbEmitter<A, B, C> {
377    fn kxorb(&mut self, op0: A, op1: B, op2: C);
378}
379
380impl<'a> KxorbEmitter<KReg, KReg, KReg> for Assembler<'a> {
381    fn kxorb(&mut self, op0: KReg, op1: KReg, op2: KReg) {
382        self.emit(
383            KXORBKKK,
384            op0.as_operand(),
385            op1.as_operand(),
386            op2.as_operand(),
387            &NOREG,
388        );
389    }
390}
391
392/// `VANDNPD`.
393///
394/// Supported operand variants:
395///
396/// ```text
397/// +---+---------------+
398/// | # | Operands      |
399/// +---+---------------+
400/// | 1 | Xmm, Xmm, Mem |
401/// | 2 | Xmm, Xmm, Xmm |
402/// | 3 | Ymm, Ymm, Mem |
403/// | 4 | Ymm, Ymm, Ymm |
404/// | 5 | Zmm, Zmm, Mem |
405/// | 6 | Zmm, Zmm, Zmm |
406/// +---+---------------+
407/// ```
408pub trait VandnpdEmitter<A, B, C> {
409    fn vandnpd(&mut self, op0: A, op1: B, op2: C);
410}
411
412impl<'a> VandnpdEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
413    fn vandnpd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
414        self.emit(
415            VANDNPD128RRR,
416            op0.as_operand(),
417            op1.as_operand(),
418            op2.as_operand(),
419            &NOREG,
420        );
421    }
422}
423
424impl<'a> VandnpdEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
425    fn vandnpd(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
426        self.emit(
427            VANDNPD128RRM,
428            op0.as_operand(),
429            op1.as_operand(),
430            op2.as_operand(),
431            &NOREG,
432        );
433    }
434}
435
436impl<'a> VandnpdEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
437    fn vandnpd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
438        self.emit(
439            VANDNPD256RRR,
440            op0.as_operand(),
441            op1.as_operand(),
442            op2.as_operand(),
443            &NOREG,
444        );
445    }
446}
447
448impl<'a> VandnpdEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
449    fn vandnpd(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
450        self.emit(
451            VANDNPD256RRM,
452            op0.as_operand(),
453            op1.as_operand(),
454            op2.as_operand(),
455            &NOREG,
456        );
457    }
458}
459
460impl<'a> VandnpdEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
461    fn vandnpd(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
462        self.emit(
463            VANDNPD512RRR,
464            op0.as_operand(),
465            op1.as_operand(),
466            op2.as_operand(),
467            &NOREG,
468        );
469    }
470}
471
472impl<'a> VandnpdEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
473    fn vandnpd(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
474        self.emit(
475            VANDNPD512RRM,
476            op0.as_operand(),
477            op1.as_operand(),
478            op2.as_operand(),
479            &NOREG,
480        );
481    }
482}
483
484/// `VANDNPD_MASK`.
485///
486/// Supported operand variants:
487///
488/// ```text
489/// +---+---------------+
490/// | # | Operands      |
491/// +---+---------------+
492/// | 1 | Xmm, Xmm, Mem |
493/// | 2 | Xmm, Xmm, Xmm |
494/// | 3 | Ymm, Ymm, Mem |
495/// | 4 | Ymm, Ymm, Ymm |
496/// | 5 | Zmm, Zmm, Mem |
497/// | 6 | Zmm, Zmm, Zmm |
498/// +---+---------------+
499/// ```
500pub trait VandnpdMaskEmitter<A, B, C> {
501    fn vandnpd_mask(&mut self, op0: A, op1: B, op2: C);
502}
503
504impl<'a> VandnpdMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
505    fn vandnpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
506        self.emit(
507            VANDNPD128RRR_MASK,
508            op0.as_operand(),
509            op1.as_operand(),
510            op2.as_operand(),
511            &NOREG,
512        );
513    }
514}
515
516impl<'a> VandnpdMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
517    fn vandnpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
518        self.emit(
519            VANDNPD128RRM_MASK,
520            op0.as_operand(),
521            op1.as_operand(),
522            op2.as_operand(),
523            &NOREG,
524        );
525    }
526}
527
528impl<'a> VandnpdMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
529    fn vandnpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
530        self.emit(
531            VANDNPD256RRR_MASK,
532            op0.as_operand(),
533            op1.as_operand(),
534            op2.as_operand(),
535            &NOREG,
536        );
537    }
538}
539
540impl<'a> VandnpdMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
541    fn vandnpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
542        self.emit(
543            VANDNPD256RRM_MASK,
544            op0.as_operand(),
545            op1.as_operand(),
546            op2.as_operand(),
547            &NOREG,
548        );
549    }
550}
551
552impl<'a> VandnpdMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
553    fn vandnpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
554        self.emit(
555            VANDNPD512RRR_MASK,
556            op0.as_operand(),
557            op1.as_operand(),
558            op2.as_operand(),
559            &NOREG,
560        );
561    }
562}
563
564impl<'a> VandnpdMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
565    fn vandnpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
566        self.emit(
567            VANDNPD512RRM_MASK,
568            op0.as_operand(),
569            op1.as_operand(),
570            op2.as_operand(),
571            &NOREG,
572        );
573    }
574}
575
576/// `VANDNPD_MASKZ`.
577///
578/// Supported operand variants:
579///
580/// ```text
581/// +---+---------------+
582/// | # | Operands      |
583/// +---+---------------+
584/// | 1 | Xmm, Xmm, Mem |
585/// | 2 | Xmm, Xmm, Xmm |
586/// | 3 | Ymm, Ymm, Mem |
587/// | 4 | Ymm, Ymm, Ymm |
588/// | 5 | Zmm, Zmm, Mem |
589/// | 6 | Zmm, Zmm, Zmm |
590/// +---+---------------+
591/// ```
592pub trait VandnpdMaskzEmitter<A, B, C> {
593    fn vandnpd_maskz(&mut self, op0: A, op1: B, op2: C);
594}
595
596impl<'a> VandnpdMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
597    fn vandnpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
598        self.emit(
599            VANDNPD128RRR_MASKZ,
600            op0.as_operand(),
601            op1.as_operand(),
602            op2.as_operand(),
603            &NOREG,
604        );
605    }
606}
607
608impl<'a> VandnpdMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
609    fn vandnpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
610        self.emit(
611            VANDNPD128RRM_MASKZ,
612            op0.as_operand(),
613            op1.as_operand(),
614            op2.as_operand(),
615            &NOREG,
616        );
617    }
618}
619
620impl<'a> VandnpdMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
621    fn vandnpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
622        self.emit(
623            VANDNPD256RRR_MASKZ,
624            op0.as_operand(),
625            op1.as_operand(),
626            op2.as_operand(),
627            &NOREG,
628        );
629    }
630}
631
632impl<'a> VandnpdMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
633    fn vandnpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
634        self.emit(
635            VANDNPD256RRM_MASKZ,
636            op0.as_operand(),
637            op1.as_operand(),
638            op2.as_operand(),
639            &NOREG,
640        );
641    }
642}
643
644impl<'a> VandnpdMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
645    fn vandnpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
646        self.emit(
647            VANDNPD512RRR_MASKZ,
648            op0.as_operand(),
649            op1.as_operand(),
650            op2.as_operand(),
651            &NOREG,
652        );
653    }
654}
655
656impl<'a> VandnpdMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
657    fn vandnpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
658        self.emit(
659            VANDNPD512RRM_MASKZ,
660            op0.as_operand(),
661            op1.as_operand(),
662            op2.as_operand(),
663            &NOREG,
664        );
665    }
666}
667
668/// `VANDNPS`.
669///
670/// Supported operand variants:
671///
672/// ```text
673/// +---+---------------+
674/// | # | Operands      |
675/// +---+---------------+
676/// | 1 | Xmm, Xmm, Mem |
677/// | 2 | Xmm, Xmm, Xmm |
678/// | 3 | Ymm, Ymm, Mem |
679/// | 4 | Ymm, Ymm, Ymm |
680/// | 5 | Zmm, Zmm, Mem |
681/// | 6 | Zmm, Zmm, Zmm |
682/// +---+---------------+
683/// ```
684pub trait VandnpsEmitter<A, B, C> {
685    fn vandnps(&mut self, op0: A, op1: B, op2: C);
686}
687
688impl<'a> VandnpsEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
689    fn vandnps(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
690        self.emit(
691            VANDNPS128RRR,
692            op0.as_operand(),
693            op1.as_operand(),
694            op2.as_operand(),
695            &NOREG,
696        );
697    }
698}
699
700impl<'a> VandnpsEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
701    fn vandnps(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
702        self.emit(
703            VANDNPS128RRM,
704            op0.as_operand(),
705            op1.as_operand(),
706            op2.as_operand(),
707            &NOREG,
708        );
709    }
710}
711
712impl<'a> VandnpsEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
713    fn vandnps(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
714        self.emit(
715            VANDNPS256RRR,
716            op0.as_operand(),
717            op1.as_operand(),
718            op2.as_operand(),
719            &NOREG,
720        );
721    }
722}
723
724impl<'a> VandnpsEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
725    fn vandnps(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
726        self.emit(
727            VANDNPS256RRM,
728            op0.as_operand(),
729            op1.as_operand(),
730            op2.as_operand(),
731            &NOREG,
732        );
733    }
734}
735
736impl<'a> VandnpsEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
737    fn vandnps(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
738        self.emit(
739            VANDNPS512RRR,
740            op0.as_operand(),
741            op1.as_operand(),
742            op2.as_operand(),
743            &NOREG,
744        );
745    }
746}
747
748impl<'a> VandnpsEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
749    fn vandnps(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
750        self.emit(
751            VANDNPS512RRM,
752            op0.as_operand(),
753            op1.as_operand(),
754            op2.as_operand(),
755            &NOREG,
756        );
757    }
758}
759
760/// `VANDNPS_MASK`.
761///
762/// Supported operand variants:
763///
764/// ```text
765/// +---+---------------+
766/// | # | Operands      |
767/// +---+---------------+
768/// | 1 | Xmm, Xmm, Mem |
769/// | 2 | Xmm, Xmm, Xmm |
770/// | 3 | Ymm, Ymm, Mem |
771/// | 4 | Ymm, Ymm, Ymm |
772/// | 5 | Zmm, Zmm, Mem |
773/// | 6 | Zmm, Zmm, Zmm |
774/// +---+---------------+
775/// ```
776pub trait VandnpsMaskEmitter<A, B, C> {
777    fn vandnps_mask(&mut self, op0: A, op1: B, op2: C);
778}
779
780impl<'a> VandnpsMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
781    fn vandnps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
782        self.emit(
783            VANDNPS128RRR_MASK,
784            op0.as_operand(),
785            op1.as_operand(),
786            op2.as_operand(),
787            &NOREG,
788        );
789    }
790}
791
792impl<'a> VandnpsMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
793    fn vandnps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
794        self.emit(
795            VANDNPS128RRM_MASK,
796            op0.as_operand(),
797            op1.as_operand(),
798            op2.as_operand(),
799            &NOREG,
800        );
801    }
802}
803
804impl<'a> VandnpsMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
805    fn vandnps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
806        self.emit(
807            VANDNPS256RRR_MASK,
808            op0.as_operand(),
809            op1.as_operand(),
810            op2.as_operand(),
811            &NOREG,
812        );
813    }
814}
815
816impl<'a> VandnpsMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
817    fn vandnps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
818        self.emit(
819            VANDNPS256RRM_MASK,
820            op0.as_operand(),
821            op1.as_operand(),
822            op2.as_operand(),
823            &NOREG,
824        );
825    }
826}
827
828impl<'a> VandnpsMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
829    fn vandnps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
830        self.emit(
831            VANDNPS512RRR_MASK,
832            op0.as_operand(),
833            op1.as_operand(),
834            op2.as_operand(),
835            &NOREG,
836        );
837    }
838}
839
840impl<'a> VandnpsMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
841    fn vandnps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
842        self.emit(
843            VANDNPS512RRM_MASK,
844            op0.as_operand(),
845            op1.as_operand(),
846            op2.as_operand(),
847            &NOREG,
848        );
849    }
850}
851
852/// `VANDNPS_MASKZ`.
853///
854/// Supported operand variants:
855///
856/// ```text
857/// +---+---------------+
858/// | # | Operands      |
859/// +---+---------------+
860/// | 1 | Xmm, Xmm, Mem |
861/// | 2 | Xmm, Xmm, Xmm |
862/// | 3 | Ymm, Ymm, Mem |
863/// | 4 | Ymm, Ymm, Ymm |
864/// | 5 | Zmm, Zmm, Mem |
865/// | 6 | Zmm, Zmm, Zmm |
866/// +---+---------------+
867/// ```
868pub trait VandnpsMaskzEmitter<A, B, C> {
869    fn vandnps_maskz(&mut self, op0: A, op1: B, op2: C);
870}
871
872impl<'a> VandnpsMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
873    fn vandnps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
874        self.emit(
875            VANDNPS128RRR_MASKZ,
876            op0.as_operand(),
877            op1.as_operand(),
878            op2.as_operand(),
879            &NOREG,
880        );
881    }
882}
883
884impl<'a> VandnpsMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
885    fn vandnps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
886        self.emit(
887            VANDNPS128RRM_MASKZ,
888            op0.as_operand(),
889            op1.as_operand(),
890            op2.as_operand(),
891            &NOREG,
892        );
893    }
894}
895
896impl<'a> VandnpsMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
897    fn vandnps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
898        self.emit(
899            VANDNPS256RRR_MASKZ,
900            op0.as_operand(),
901            op1.as_operand(),
902            op2.as_operand(),
903            &NOREG,
904        );
905    }
906}
907
908impl<'a> VandnpsMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
909    fn vandnps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
910        self.emit(
911            VANDNPS256RRM_MASKZ,
912            op0.as_operand(),
913            op1.as_operand(),
914            op2.as_operand(),
915            &NOREG,
916        );
917    }
918}
919
920impl<'a> VandnpsMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
921    fn vandnps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
922        self.emit(
923            VANDNPS512RRR_MASKZ,
924            op0.as_operand(),
925            op1.as_operand(),
926            op2.as_operand(),
927            &NOREG,
928        );
929    }
930}
931
932impl<'a> VandnpsMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
933    fn vandnps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
934        self.emit(
935            VANDNPS512RRM_MASKZ,
936            op0.as_operand(),
937            op1.as_operand(),
938            op2.as_operand(),
939            &NOREG,
940        );
941    }
942}
943
944/// `VANDPD`.
945///
946/// Supported operand variants:
947///
948/// ```text
949/// +---+---------------+
950/// | # | Operands      |
951/// +---+---------------+
952/// | 1 | Xmm, Xmm, Mem |
953/// | 2 | Xmm, Xmm, Xmm |
954/// | 3 | Ymm, Ymm, Mem |
955/// | 4 | Ymm, Ymm, Ymm |
956/// | 5 | Zmm, Zmm, Mem |
957/// | 6 | Zmm, Zmm, Zmm |
958/// +---+---------------+
959/// ```
960pub trait VandpdEmitter<A, B, C> {
961    fn vandpd(&mut self, op0: A, op1: B, op2: C);
962}
963
964impl<'a> VandpdEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
965    fn vandpd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
966        self.emit(
967            VANDPD128RRR,
968            op0.as_operand(),
969            op1.as_operand(),
970            op2.as_operand(),
971            &NOREG,
972        );
973    }
974}
975
976impl<'a> VandpdEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
977    fn vandpd(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
978        self.emit(
979            VANDPD128RRM,
980            op0.as_operand(),
981            op1.as_operand(),
982            op2.as_operand(),
983            &NOREG,
984        );
985    }
986}
987
988impl<'a> VandpdEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
989    fn vandpd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
990        self.emit(
991            VANDPD256RRR,
992            op0.as_operand(),
993            op1.as_operand(),
994            op2.as_operand(),
995            &NOREG,
996        );
997    }
998}
999
1000impl<'a> VandpdEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1001    fn vandpd(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1002        self.emit(
1003            VANDPD256RRM,
1004            op0.as_operand(),
1005            op1.as_operand(),
1006            op2.as_operand(),
1007            &NOREG,
1008        );
1009    }
1010}
1011
1012impl<'a> VandpdEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1013    fn vandpd(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1014        self.emit(
1015            VANDPD512RRR,
1016            op0.as_operand(),
1017            op1.as_operand(),
1018            op2.as_operand(),
1019            &NOREG,
1020        );
1021    }
1022}
1023
1024impl<'a> VandpdEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1025    fn vandpd(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1026        self.emit(
1027            VANDPD512RRM,
1028            op0.as_operand(),
1029            op1.as_operand(),
1030            op2.as_operand(),
1031            &NOREG,
1032        );
1033    }
1034}
1035
1036/// `VANDPD_MASK`.
1037///
1038/// Supported operand variants:
1039///
1040/// ```text
1041/// +---+---------------+
1042/// | # | Operands      |
1043/// +---+---------------+
1044/// | 1 | Xmm, Xmm, Mem |
1045/// | 2 | Xmm, Xmm, Xmm |
1046/// | 3 | Ymm, Ymm, Mem |
1047/// | 4 | Ymm, Ymm, Ymm |
1048/// | 5 | Zmm, Zmm, Mem |
1049/// | 6 | Zmm, Zmm, Zmm |
1050/// +---+---------------+
1051/// ```
1052pub trait VandpdMaskEmitter<A, B, C> {
1053    fn vandpd_mask(&mut self, op0: A, op1: B, op2: C);
1054}
1055
1056impl<'a> VandpdMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
1057    fn vandpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
1058        self.emit(
1059            VANDPD128RRR_MASK,
1060            op0.as_operand(),
1061            op1.as_operand(),
1062            op2.as_operand(),
1063            &NOREG,
1064        );
1065    }
1066}
1067
1068impl<'a> VandpdMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
1069    fn vandpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
1070        self.emit(
1071            VANDPD128RRM_MASK,
1072            op0.as_operand(),
1073            op1.as_operand(),
1074            op2.as_operand(),
1075            &NOREG,
1076        );
1077    }
1078}
1079
1080impl<'a> VandpdMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
1081    fn vandpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
1082        self.emit(
1083            VANDPD256RRR_MASK,
1084            op0.as_operand(),
1085            op1.as_operand(),
1086            op2.as_operand(),
1087            &NOREG,
1088        );
1089    }
1090}
1091
1092impl<'a> VandpdMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1093    fn vandpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1094        self.emit(
1095            VANDPD256RRM_MASK,
1096            op0.as_operand(),
1097            op1.as_operand(),
1098            op2.as_operand(),
1099            &NOREG,
1100        );
1101    }
1102}
1103
1104impl<'a> VandpdMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1105    fn vandpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1106        self.emit(
1107            VANDPD512RRR_MASK,
1108            op0.as_operand(),
1109            op1.as_operand(),
1110            op2.as_operand(),
1111            &NOREG,
1112        );
1113    }
1114}
1115
1116impl<'a> VandpdMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1117    fn vandpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1118        self.emit(
1119            VANDPD512RRM_MASK,
1120            op0.as_operand(),
1121            op1.as_operand(),
1122            op2.as_operand(),
1123            &NOREG,
1124        );
1125    }
1126}
1127
1128/// `VANDPD_MASKZ`.
1129///
1130/// Supported operand variants:
1131///
1132/// ```text
1133/// +---+---------------+
1134/// | # | Operands      |
1135/// +---+---------------+
1136/// | 1 | Xmm, Xmm, Mem |
1137/// | 2 | Xmm, Xmm, Xmm |
1138/// | 3 | Ymm, Ymm, Mem |
1139/// | 4 | Ymm, Ymm, Ymm |
1140/// | 5 | Zmm, Zmm, Mem |
1141/// | 6 | Zmm, Zmm, Zmm |
1142/// +---+---------------+
1143/// ```
1144pub trait VandpdMaskzEmitter<A, B, C> {
1145    fn vandpd_maskz(&mut self, op0: A, op1: B, op2: C);
1146}
1147
1148impl<'a> VandpdMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
1149    fn vandpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
1150        self.emit(
1151            VANDPD128RRR_MASKZ,
1152            op0.as_operand(),
1153            op1.as_operand(),
1154            op2.as_operand(),
1155            &NOREG,
1156        );
1157    }
1158}
1159
1160impl<'a> VandpdMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
1161    fn vandpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
1162        self.emit(
1163            VANDPD128RRM_MASKZ,
1164            op0.as_operand(),
1165            op1.as_operand(),
1166            op2.as_operand(),
1167            &NOREG,
1168        );
1169    }
1170}
1171
1172impl<'a> VandpdMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
1173    fn vandpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
1174        self.emit(
1175            VANDPD256RRR_MASKZ,
1176            op0.as_operand(),
1177            op1.as_operand(),
1178            op2.as_operand(),
1179            &NOREG,
1180        );
1181    }
1182}
1183
1184impl<'a> VandpdMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1185    fn vandpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1186        self.emit(
1187            VANDPD256RRM_MASKZ,
1188            op0.as_operand(),
1189            op1.as_operand(),
1190            op2.as_operand(),
1191            &NOREG,
1192        );
1193    }
1194}
1195
1196impl<'a> VandpdMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1197    fn vandpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1198        self.emit(
1199            VANDPD512RRR_MASKZ,
1200            op0.as_operand(),
1201            op1.as_operand(),
1202            op2.as_operand(),
1203            &NOREG,
1204        );
1205    }
1206}
1207
1208impl<'a> VandpdMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1209    fn vandpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1210        self.emit(
1211            VANDPD512RRM_MASKZ,
1212            op0.as_operand(),
1213            op1.as_operand(),
1214            op2.as_operand(),
1215            &NOREG,
1216        );
1217    }
1218}
1219
1220/// `VANDPS`.
1221///
1222/// Supported operand variants:
1223///
1224/// ```text
1225/// +---+---------------+
1226/// | # | Operands      |
1227/// +---+---------------+
1228/// | 1 | Xmm, Xmm, Mem |
1229/// | 2 | Xmm, Xmm, Xmm |
1230/// | 3 | Ymm, Ymm, Mem |
1231/// | 4 | Ymm, Ymm, Ymm |
1232/// | 5 | Zmm, Zmm, Mem |
1233/// | 6 | Zmm, Zmm, Zmm |
1234/// +---+---------------+
1235/// ```
1236pub trait VandpsEmitter<A, B, C> {
1237    fn vandps(&mut self, op0: A, op1: B, op2: C);
1238}
1239
1240impl<'a> VandpsEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
1241    fn vandps(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
1242        self.emit(
1243            VANDPS128RRR,
1244            op0.as_operand(),
1245            op1.as_operand(),
1246            op2.as_operand(),
1247            &NOREG,
1248        );
1249    }
1250}
1251
1252impl<'a> VandpsEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
1253    fn vandps(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
1254        self.emit(
1255            VANDPS128RRM,
1256            op0.as_operand(),
1257            op1.as_operand(),
1258            op2.as_operand(),
1259            &NOREG,
1260        );
1261    }
1262}
1263
1264impl<'a> VandpsEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
1265    fn vandps(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
1266        self.emit(
1267            VANDPS256RRR,
1268            op0.as_operand(),
1269            op1.as_operand(),
1270            op2.as_operand(),
1271            &NOREG,
1272        );
1273    }
1274}
1275
1276impl<'a> VandpsEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1277    fn vandps(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1278        self.emit(
1279            VANDPS256RRM,
1280            op0.as_operand(),
1281            op1.as_operand(),
1282            op2.as_operand(),
1283            &NOREG,
1284        );
1285    }
1286}
1287
1288impl<'a> VandpsEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1289    fn vandps(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1290        self.emit(
1291            VANDPS512RRR,
1292            op0.as_operand(),
1293            op1.as_operand(),
1294            op2.as_operand(),
1295            &NOREG,
1296        );
1297    }
1298}
1299
1300impl<'a> VandpsEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1301    fn vandps(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1302        self.emit(
1303            VANDPS512RRM,
1304            op0.as_operand(),
1305            op1.as_operand(),
1306            op2.as_operand(),
1307            &NOREG,
1308        );
1309    }
1310}
1311
1312/// `VANDPS_MASK`.
1313///
1314/// Supported operand variants:
1315///
1316/// ```text
1317/// +---+---------------+
1318/// | # | Operands      |
1319/// +---+---------------+
1320/// | 1 | Xmm, Xmm, Mem |
1321/// | 2 | Xmm, Xmm, Xmm |
1322/// | 3 | Ymm, Ymm, Mem |
1323/// | 4 | Ymm, Ymm, Ymm |
1324/// | 5 | Zmm, Zmm, Mem |
1325/// | 6 | Zmm, Zmm, Zmm |
1326/// +---+---------------+
1327/// ```
1328pub trait VandpsMaskEmitter<A, B, C> {
1329    fn vandps_mask(&mut self, op0: A, op1: B, op2: C);
1330}
1331
1332impl<'a> VandpsMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
1333    fn vandps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
1334        self.emit(
1335            VANDPS128RRR_MASK,
1336            op0.as_operand(),
1337            op1.as_operand(),
1338            op2.as_operand(),
1339            &NOREG,
1340        );
1341    }
1342}
1343
1344impl<'a> VandpsMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
1345    fn vandps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
1346        self.emit(
1347            VANDPS128RRM_MASK,
1348            op0.as_operand(),
1349            op1.as_operand(),
1350            op2.as_operand(),
1351            &NOREG,
1352        );
1353    }
1354}
1355
1356impl<'a> VandpsMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
1357    fn vandps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
1358        self.emit(
1359            VANDPS256RRR_MASK,
1360            op0.as_operand(),
1361            op1.as_operand(),
1362            op2.as_operand(),
1363            &NOREG,
1364        );
1365    }
1366}
1367
1368impl<'a> VandpsMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1369    fn vandps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1370        self.emit(
1371            VANDPS256RRM_MASK,
1372            op0.as_operand(),
1373            op1.as_operand(),
1374            op2.as_operand(),
1375            &NOREG,
1376        );
1377    }
1378}
1379
1380impl<'a> VandpsMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1381    fn vandps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1382        self.emit(
1383            VANDPS512RRR_MASK,
1384            op0.as_operand(),
1385            op1.as_operand(),
1386            op2.as_operand(),
1387            &NOREG,
1388        );
1389    }
1390}
1391
1392impl<'a> VandpsMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1393    fn vandps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1394        self.emit(
1395            VANDPS512RRM_MASK,
1396            op0.as_operand(),
1397            op1.as_operand(),
1398            op2.as_operand(),
1399            &NOREG,
1400        );
1401    }
1402}
1403
1404/// `VANDPS_MASKZ`.
1405///
1406/// Supported operand variants:
1407///
1408/// ```text
1409/// +---+---------------+
1410/// | # | Operands      |
1411/// +---+---------------+
1412/// | 1 | Xmm, Xmm, Mem |
1413/// | 2 | Xmm, Xmm, Xmm |
1414/// | 3 | Ymm, Ymm, Mem |
1415/// | 4 | Ymm, Ymm, Ymm |
1416/// | 5 | Zmm, Zmm, Mem |
1417/// | 6 | Zmm, Zmm, Zmm |
1418/// +---+---------------+
1419/// ```
1420pub trait VandpsMaskzEmitter<A, B, C> {
1421    fn vandps_maskz(&mut self, op0: A, op1: B, op2: C);
1422}
1423
1424impl<'a> VandpsMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
1425    fn vandps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
1426        self.emit(
1427            VANDPS128RRR_MASKZ,
1428            op0.as_operand(),
1429            op1.as_operand(),
1430            op2.as_operand(),
1431            &NOREG,
1432        );
1433    }
1434}
1435
1436impl<'a> VandpsMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
1437    fn vandps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
1438        self.emit(
1439            VANDPS128RRM_MASKZ,
1440            op0.as_operand(),
1441            op1.as_operand(),
1442            op2.as_operand(),
1443            &NOREG,
1444        );
1445    }
1446}
1447
1448impl<'a> VandpsMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
1449    fn vandps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
1450        self.emit(
1451            VANDPS256RRR_MASKZ,
1452            op0.as_operand(),
1453            op1.as_operand(),
1454            op2.as_operand(),
1455            &NOREG,
1456        );
1457    }
1458}
1459
1460impl<'a> VandpsMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
1461    fn vandps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
1462        self.emit(
1463            VANDPS256RRM_MASKZ,
1464            op0.as_operand(),
1465            op1.as_operand(),
1466            op2.as_operand(),
1467            &NOREG,
1468        );
1469    }
1470}
1471
1472impl<'a> VandpsMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
1473    fn vandps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
1474        self.emit(
1475            VANDPS512RRR_MASKZ,
1476            op0.as_operand(),
1477            op1.as_operand(),
1478            op2.as_operand(),
1479            &NOREG,
1480        );
1481    }
1482}
1483
1484impl<'a> VandpsMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
1485    fn vandps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
1486        self.emit(
1487            VANDPS512RRM_MASKZ,
1488            op0.as_operand(),
1489            op1.as_operand(),
1490            op2.as_operand(),
1491            &NOREG,
1492        );
1493    }
1494}
1495
1496/// `VBROADCASTF32X2`.
1497///
1498/// Supported operand variants:
1499///
1500/// ```text
1501/// +---+----------+
1502/// | # | Operands |
1503/// +---+----------+
1504/// | 1 | Ymm, Mem |
1505/// | 2 | Ymm, Xmm |
1506/// | 3 | Zmm, Mem |
1507/// | 4 | Zmm, Xmm |
1508/// +---+----------+
1509/// ```
1510pub trait Vbroadcastf32x2Emitter<A, B> {
1511    fn vbroadcastf32x2(&mut self, op0: A, op1: B);
1512}
1513
1514impl<'a> Vbroadcastf32x2Emitter<Ymm, Xmm> for Assembler<'a> {
1515    fn vbroadcastf32x2(&mut self, op0: Ymm, op1: Xmm) {
1516        self.emit(
1517            VBROADCASTF32X2_256RR,
1518            op0.as_operand(),
1519            op1.as_operand(),
1520            &NOREG,
1521            &NOREG,
1522        );
1523    }
1524}
1525
1526impl<'a> Vbroadcastf32x2Emitter<Ymm, Mem> for Assembler<'a> {
1527    fn vbroadcastf32x2(&mut self, op0: Ymm, op1: Mem) {
1528        self.emit(
1529            VBROADCASTF32X2_256RM,
1530            op0.as_operand(),
1531            op1.as_operand(),
1532            &NOREG,
1533            &NOREG,
1534        );
1535    }
1536}
1537
1538impl<'a> Vbroadcastf32x2Emitter<Zmm, Xmm> for Assembler<'a> {
1539    fn vbroadcastf32x2(&mut self, op0: Zmm, op1: Xmm) {
1540        self.emit(
1541            VBROADCASTF32X2_512RR,
1542            op0.as_operand(),
1543            op1.as_operand(),
1544            &NOREG,
1545            &NOREG,
1546        );
1547    }
1548}
1549
1550impl<'a> Vbroadcastf32x2Emitter<Zmm, Mem> for Assembler<'a> {
1551    fn vbroadcastf32x2(&mut self, op0: Zmm, op1: Mem) {
1552        self.emit(
1553            VBROADCASTF32X2_512RM,
1554            op0.as_operand(),
1555            op1.as_operand(),
1556            &NOREG,
1557            &NOREG,
1558        );
1559    }
1560}
1561
1562/// `VBROADCASTF32X2_MASK`.
1563///
1564/// Supported operand variants:
1565///
1566/// ```text
1567/// +---+----------+
1568/// | # | Operands |
1569/// +---+----------+
1570/// | 1 | Ymm, Mem |
1571/// | 2 | Ymm, Xmm |
1572/// | 3 | Zmm, Mem |
1573/// | 4 | Zmm, Xmm |
1574/// +---+----------+
1575/// ```
1576pub trait Vbroadcastf32x2MaskEmitter<A, B> {
1577    fn vbroadcastf32x2_mask(&mut self, op0: A, op1: B);
1578}
1579
1580impl<'a> Vbroadcastf32x2MaskEmitter<Ymm, Xmm> for Assembler<'a> {
1581    fn vbroadcastf32x2_mask(&mut self, op0: Ymm, op1: Xmm) {
1582        self.emit(
1583            VBROADCASTF32X2_256RR_MASK,
1584            op0.as_operand(),
1585            op1.as_operand(),
1586            &NOREG,
1587            &NOREG,
1588        );
1589    }
1590}
1591
1592impl<'a> Vbroadcastf32x2MaskEmitter<Ymm, Mem> for Assembler<'a> {
1593    fn vbroadcastf32x2_mask(&mut self, op0: Ymm, op1: Mem) {
1594        self.emit(
1595            VBROADCASTF32X2_256RM_MASK,
1596            op0.as_operand(),
1597            op1.as_operand(),
1598            &NOREG,
1599            &NOREG,
1600        );
1601    }
1602}
1603
1604impl<'a> Vbroadcastf32x2MaskEmitter<Zmm, Xmm> for Assembler<'a> {
1605    fn vbroadcastf32x2_mask(&mut self, op0: Zmm, op1: Xmm) {
1606        self.emit(
1607            VBROADCASTF32X2_512RR_MASK,
1608            op0.as_operand(),
1609            op1.as_operand(),
1610            &NOREG,
1611            &NOREG,
1612        );
1613    }
1614}
1615
1616impl<'a> Vbroadcastf32x2MaskEmitter<Zmm, Mem> for Assembler<'a> {
1617    fn vbroadcastf32x2_mask(&mut self, op0: Zmm, op1: Mem) {
1618        self.emit(
1619            VBROADCASTF32X2_512RM_MASK,
1620            op0.as_operand(),
1621            op1.as_operand(),
1622            &NOREG,
1623            &NOREG,
1624        );
1625    }
1626}
1627
1628/// `VBROADCASTF32X2_MASKZ`.
1629///
1630/// Supported operand variants:
1631///
1632/// ```text
1633/// +---+----------+
1634/// | # | Operands |
1635/// +---+----------+
1636/// | 1 | Ymm, Mem |
1637/// | 2 | Ymm, Xmm |
1638/// | 3 | Zmm, Mem |
1639/// | 4 | Zmm, Xmm |
1640/// +---+----------+
1641/// ```
1642pub trait Vbroadcastf32x2MaskzEmitter<A, B> {
1643    fn vbroadcastf32x2_maskz(&mut self, op0: A, op1: B);
1644}
1645
1646impl<'a> Vbroadcastf32x2MaskzEmitter<Ymm, Xmm> for Assembler<'a> {
1647    fn vbroadcastf32x2_maskz(&mut self, op0: Ymm, op1: Xmm) {
1648        self.emit(
1649            VBROADCASTF32X2_256RR_MASKZ,
1650            op0.as_operand(),
1651            op1.as_operand(),
1652            &NOREG,
1653            &NOREG,
1654        );
1655    }
1656}
1657
1658impl<'a> Vbroadcastf32x2MaskzEmitter<Ymm, Mem> for Assembler<'a> {
1659    fn vbroadcastf32x2_maskz(&mut self, op0: Ymm, op1: Mem) {
1660        self.emit(
1661            VBROADCASTF32X2_256RM_MASKZ,
1662            op0.as_operand(),
1663            op1.as_operand(),
1664            &NOREG,
1665            &NOREG,
1666        );
1667    }
1668}
1669
1670impl<'a> Vbroadcastf32x2MaskzEmitter<Zmm, Xmm> for Assembler<'a> {
1671    fn vbroadcastf32x2_maskz(&mut self, op0: Zmm, op1: Xmm) {
1672        self.emit(
1673            VBROADCASTF32X2_512RR_MASKZ,
1674            op0.as_operand(),
1675            op1.as_operand(),
1676            &NOREG,
1677            &NOREG,
1678        );
1679    }
1680}
1681
1682impl<'a> Vbroadcastf32x2MaskzEmitter<Zmm, Mem> for Assembler<'a> {
1683    fn vbroadcastf32x2_maskz(&mut self, op0: Zmm, op1: Mem) {
1684        self.emit(
1685            VBROADCASTF32X2_512RM_MASKZ,
1686            op0.as_operand(),
1687            op1.as_operand(),
1688            &NOREG,
1689            &NOREG,
1690        );
1691    }
1692}
1693
1694/// `VBROADCASTF32X8`.
1695///
1696/// Supported operand variants:
1697///
1698/// ```text
1699/// +---+----------+
1700/// | # | Operands |
1701/// +---+----------+
1702/// | 1 | Zmm, Mem |
1703/// +---+----------+
1704/// ```
1705pub trait Vbroadcastf32x8Emitter<A, B> {
1706    fn vbroadcastf32x8(&mut self, op0: A, op1: B);
1707}
1708
1709impl<'a> Vbroadcastf32x8Emitter<Zmm, Mem> for Assembler<'a> {
1710    fn vbroadcastf32x8(&mut self, op0: Zmm, op1: Mem) {
1711        self.emit(
1712            VBROADCASTF32X8_512RM,
1713            op0.as_operand(),
1714            op1.as_operand(),
1715            &NOREG,
1716            &NOREG,
1717        );
1718    }
1719}
1720
1721/// `VBROADCASTF32X8_MASK`.
1722///
1723/// Supported operand variants:
1724///
1725/// ```text
1726/// +---+----------+
1727/// | # | Operands |
1728/// +---+----------+
1729/// | 1 | Zmm, Mem |
1730/// +---+----------+
1731/// ```
1732pub trait Vbroadcastf32x8MaskEmitter<A, B> {
1733    fn vbroadcastf32x8_mask(&mut self, op0: A, op1: B);
1734}
1735
1736impl<'a> Vbroadcastf32x8MaskEmitter<Zmm, Mem> for Assembler<'a> {
1737    fn vbroadcastf32x8_mask(&mut self, op0: Zmm, op1: Mem) {
1738        self.emit(
1739            VBROADCASTF32X8_512RM_MASK,
1740            op0.as_operand(),
1741            op1.as_operand(),
1742            &NOREG,
1743            &NOREG,
1744        );
1745    }
1746}
1747
1748/// `VBROADCASTF32X8_MASKZ`.
1749///
1750/// Supported operand variants:
1751///
1752/// ```text
1753/// +---+----------+
1754/// | # | Operands |
1755/// +---+----------+
1756/// | 1 | Zmm, Mem |
1757/// +---+----------+
1758/// ```
1759pub trait Vbroadcastf32x8MaskzEmitter<A, B> {
1760    fn vbroadcastf32x8_maskz(&mut self, op0: A, op1: B);
1761}
1762
1763impl<'a> Vbroadcastf32x8MaskzEmitter<Zmm, Mem> for Assembler<'a> {
1764    fn vbroadcastf32x8_maskz(&mut self, op0: Zmm, op1: Mem) {
1765        self.emit(
1766            VBROADCASTF32X8_512RM_MASKZ,
1767            op0.as_operand(),
1768            op1.as_operand(),
1769            &NOREG,
1770            &NOREG,
1771        );
1772    }
1773}
1774
1775/// `VBROADCASTF64X2`.
1776///
1777/// Supported operand variants:
1778///
1779/// ```text
1780/// +---+----------+
1781/// | # | Operands |
1782/// +---+----------+
1783/// | 1 | Ymm, Mem |
1784/// | 2 | Zmm, Mem |
1785/// +---+----------+
1786/// ```
1787pub trait Vbroadcastf64x2Emitter<A, B> {
1788    fn vbroadcastf64x2(&mut self, op0: A, op1: B);
1789}
1790
1791impl<'a> Vbroadcastf64x2Emitter<Ymm, Mem> for Assembler<'a> {
1792    fn vbroadcastf64x2(&mut self, op0: Ymm, op1: Mem) {
1793        self.emit(
1794            VBROADCASTF64X2_256RM,
1795            op0.as_operand(),
1796            op1.as_operand(),
1797            &NOREG,
1798            &NOREG,
1799        );
1800    }
1801}
1802
1803impl<'a> Vbroadcastf64x2Emitter<Zmm, Mem> for Assembler<'a> {
1804    fn vbroadcastf64x2(&mut self, op0: Zmm, op1: Mem) {
1805        self.emit(
1806            VBROADCASTF64X2_512RM,
1807            op0.as_operand(),
1808            op1.as_operand(),
1809            &NOREG,
1810            &NOREG,
1811        );
1812    }
1813}
1814
1815/// `VBROADCASTF64X2_MASK`.
1816///
1817/// Supported operand variants:
1818///
1819/// ```text
1820/// +---+----------+
1821/// | # | Operands |
1822/// +---+----------+
1823/// | 1 | Ymm, Mem |
1824/// | 2 | Zmm, Mem |
1825/// +---+----------+
1826/// ```
1827pub trait Vbroadcastf64x2MaskEmitter<A, B> {
1828    fn vbroadcastf64x2_mask(&mut self, op0: A, op1: B);
1829}
1830
1831impl<'a> Vbroadcastf64x2MaskEmitter<Ymm, Mem> for Assembler<'a> {
1832    fn vbroadcastf64x2_mask(&mut self, op0: Ymm, op1: Mem) {
1833        self.emit(
1834            VBROADCASTF64X2_256RM_MASK,
1835            op0.as_operand(),
1836            op1.as_operand(),
1837            &NOREG,
1838            &NOREG,
1839        );
1840    }
1841}
1842
1843impl<'a> Vbroadcastf64x2MaskEmitter<Zmm, Mem> for Assembler<'a> {
1844    fn vbroadcastf64x2_mask(&mut self, op0: Zmm, op1: Mem) {
1845        self.emit(
1846            VBROADCASTF64X2_512RM_MASK,
1847            op0.as_operand(),
1848            op1.as_operand(),
1849            &NOREG,
1850            &NOREG,
1851        );
1852    }
1853}
1854
1855/// `VBROADCASTF64X2_MASKZ`.
1856///
1857/// Supported operand variants:
1858///
1859/// ```text
1860/// +---+----------+
1861/// | # | Operands |
1862/// +---+----------+
1863/// | 1 | Ymm, Mem |
1864/// | 2 | Zmm, Mem |
1865/// +---+----------+
1866/// ```
1867pub trait Vbroadcastf64x2MaskzEmitter<A, B> {
1868    fn vbroadcastf64x2_maskz(&mut self, op0: A, op1: B);
1869}
1870
1871impl<'a> Vbroadcastf64x2MaskzEmitter<Ymm, Mem> for Assembler<'a> {
1872    fn vbroadcastf64x2_maskz(&mut self, op0: Ymm, op1: Mem) {
1873        self.emit(
1874            VBROADCASTF64X2_256RM_MASKZ,
1875            op0.as_operand(),
1876            op1.as_operand(),
1877            &NOREG,
1878            &NOREG,
1879        );
1880    }
1881}
1882
1883impl<'a> Vbroadcastf64x2MaskzEmitter<Zmm, Mem> for Assembler<'a> {
1884    fn vbroadcastf64x2_maskz(&mut self, op0: Zmm, op1: Mem) {
1885        self.emit(
1886            VBROADCASTF64X2_512RM_MASKZ,
1887            op0.as_operand(),
1888            op1.as_operand(),
1889            &NOREG,
1890            &NOREG,
1891        );
1892    }
1893}
1894
1895/// `VBROADCASTI32X2`.
1896///
1897/// Supported operand variants:
1898///
1899/// ```text
1900/// +---+----------+
1901/// | # | Operands |
1902/// +---+----------+
1903/// | 1 | Xmm, Mem |
1904/// | 2 | Xmm, Xmm |
1905/// | 3 | Ymm, Mem |
1906/// | 4 | Ymm, Xmm |
1907/// | 5 | Zmm, Mem |
1908/// | 6 | Zmm, Xmm |
1909/// +---+----------+
1910/// ```
1911pub trait Vbroadcasti32x2Emitter<A, B> {
1912    fn vbroadcasti32x2(&mut self, op0: A, op1: B);
1913}
1914
1915impl<'a> Vbroadcasti32x2Emitter<Xmm, Xmm> for Assembler<'a> {
1916    fn vbroadcasti32x2(&mut self, op0: Xmm, op1: Xmm) {
1917        self.emit(
1918            VBROADCASTI32X2_128RR,
1919            op0.as_operand(),
1920            op1.as_operand(),
1921            &NOREG,
1922            &NOREG,
1923        );
1924    }
1925}
1926
1927impl<'a> Vbroadcasti32x2Emitter<Xmm, Mem> for Assembler<'a> {
1928    fn vbroadcasti32x2(&mut self, op0: Xmm, op1: Mem) {
1929        self.emit(
1930            VBROADCASTI32X2_128RM,
1931            op0.as_operand(),
1932            op1.as_operand(),
1933            &NOREG,
1934            &NOREG,
1935        );
1936    }
1937}
1938
1939impl<'a> Vbroadcasti32x2Emitter<Ymm, Xmm> for Assembler<'a> {
1940    fn vbroadcasti32x2(&mut self, op0: Ymm, op1: Xmm) {
1941        self.emit(
1942            VBROADCASTI32X2_256RR,
1943            op0.as_operand(),
1944            op1.as_operand(),
1945            &NOREG,
1946            &NOREG,
1947        );
1948    }
1949}
1950
1951impl<'a> Vbroadcasti32x2Emitter<Ymm, Mem> for Assembler<'a> {
1952    fn vbroadcasti32x2(&mut self, op0: Ymm, op1: Mem) {
1953        self.emit(
1954            VBROADCASTI32X2_256RM,
1955            op0.as_operand(),
1956            op1.as_operand(),
1957            &NOREG,
1958            &NOREG,
1959        );
1960    }
1961}
1962
1963impl<'a> Vbroadcasti32x2Emitter<Zmm, Xmm> for Assembler<'a> {
1964    fn vbroadcasti32x2(&mut self, op0: Zmm, op1: Xmm) {
1965        self.emit(
1966            VBROADCASTI32X2_512RR,
1967            op0.as_operand(),
1968            op1.as_operand(),
1969            &NOREG,
1970            &NOREG,
1971        );
1972    }
1973}
1974
1975impl<'a> Vbroadcasti32x2Emitter<Zmm, Mem> for Assembler<'a> {
1976    fn vbroadcasti32x2(&mut self, op0: Zmm, op1: Mem) {
1977        self.emit(
1978            VBROADCASTI32X2_512RM,
1979            op0.as_operand(),
1980            op1.as_operand(),
1981            &NOREG,
1982            &NOREG,
1983        );
1984    }
1985}
1986
1987/// `VBROADCASTI32X2_MASK`.
1988///
1989/// Supported operand variants:
1990///
1991/// ```text
1992/// +---+----------+
1993/// | # | Operands |
1994/// +---+----------+
1995/// | 1 | Xmm, Mem |
1996/// | 2 | Xmm, Xmm |
1997/// | 3 | Ymm, Mem |
1998/// | 4 | Ymm, Xmm |
1999/// | 5 | Zmm, Mem |
2000/// | 6 | Zmm, Xmm |
2001/// +---+----------+
2002/// ```
2003pub trait Vbroadcasti32x2MaskEmitter<A, B> {
2004    fn vbroadcasti32x2_mask(&mut self, op0: A, op1: B);
2005}
2006
2007impl<'a> Vbroadcasti32x2MaskEmitter<Xmm, Xmm> for Assembler<'a> {
2008    fn vbroadcasti32x2_mask(&mut self, op0: Xmm, op1: Xmm) {
2009        self.emit(
2010            VBROADCASTI32X2_128RR_MASK,
2011            op0.as_operand(),
2012            op1.as_operand(),
2013            &NOREG,
2014            &NOREG,
2015        );
2016    }
2017}
2018
2019impl<'a> Vbroadcasti32x2MaskEmitter<Xmm, Mem> for Assembler<'a> {
2020    fn vbroadcasti32x2_mask(&mut self, op0: Xmm, op1: Mem) {
2021        self.emit(
2022            VBROADCASTI32X2_128RM_MASK,
2023            op0.as_operand(),
2024            op1.as_operand(),
2025            &NOREG,
2026            &NOREG,
2027        );
2028    }
2029}
2030
2031impl<'a> Vbroadcasti32x2MaskEmitter<Ymm, Xmm> for Assembler<'a> {
2032    fn vbroadcasti32x2_mask(&mut self, op0: Ymm, op1: Xmm) {
2033        self.emit(
2034            VBROADCASTI32X2_256RR_MASK,
2035            op0.as_operand(),
2036            op1.as_operand(),
2037            &NOREG,
2038            &NOREG,
2039        );
2040    }
2041}
2042
2043impl<'a> Vbroadcasti32x2MaskEmitter<Ymm, Mem> for Assembler<'a> {
2044    fn vbroadcasti32x2_mask(&mut self, op0: Ymm, op1: Mem) {
2045        self.emit(
2046            VBROADCASTI32X2_256RM_MASK,
2047            op0.as_operand(),
2048            op1.as_operand(),
2049            &NOREG,
2050            &NOREG,
2051        );
2052    }
2053}
2054
2055impl<'a> Vbroadcasti32x2MaskEmitter<Zmm, Xmm> for Assembler<'a> {
2056    fn vbroadcasti32x2_mask(&mut self, op0: Zmm, op1: Xmm) {
2057        self.emit(
2058            VBROADCASTI32X2_512RR_MASK,
2059            op0.as_operand(),
2060            op1.as_operand(),
2061            &NOREG,
2062            &NOREG,
2063        );
2064    }
2065}
2066
2067impl<'a> Vbroadcasti32x2MaskEmitter<Zmm, Mem> for Assembler<'a> {
2068    fn vbroadcasti32x2_mask(&mut self, op0: Zmm, op1: Mem) {
2069        self.emit(
2070            VBROADCASTI32X2_512RM_MASK,
2071            op0.as_operand(),
2072            op1.as_operand(),
2073            &NOREG,
2074            &NOREG,
2075        );
2076    }
2077}
2078
2079/// `VBROADCASTI32X2_MASKZ`.
2080///
2081/// Supported operand variants:
2082///
2083/// ```text
2084/// +---+----------+
2085/// | # | Operands |
2086/// +---+----------+
2087/// | 1 | Xmm, Mem |
2088/// | 2 | Xmm, Xmm |
2089/// | 3 | Ymm, Mem |
2090/// | 4 | Ymm, Xmm |
2091/// | 5 | Zmm, Mem |
2092/// | 6 | Zmm, Xmm |
2093/// +---+----------+
2094/// ```
2095pub trait Vbroadcasti32x2MaskzEmitter<A, B> {
2096    fn vbroadcasti32x2_maskz(&mut self, op0: A, op1: B);
2097}
2098
2099impl<'a> Vbroadcasti32x2MaskzEmitter<Xmm, Xmm> for Assembler<'a> {
2100    fn vbroadcasti32x2_maskz(&mut self, op0: Xmm, op1: Xmm) {
2101        self.emit(
2102            VBROADCASTI32X2_128RR_MASKZ,
2103            op0.as_operand(),
2104            op1.as_operand(),
2105            &NOREG,
2106            &NOREG,
2107        );
2108    }
2109}
2110
2111impl<'a> Vbroadcasti32x2MaskzEmitter<Xmm, Mem> for Assembler<'a> {
2112    fn vbroadcasti32x2_maskz(&mut self, op0: Xmm, op1: Mem) {
2113        self.emit(
2114            VBROADCASTI32X2_128RM_MASKZ,
2115            op0.as_operand(),
2116            op1.as_operand(),
2117            &NOREG,
2118            &NOREG,
2119        );
2120    }
2121}
2122
2123impl<'a> Vbroadcasti32x2MaskzEmitter<Ymm, Xmm> for Assembler<'a> {
2124    fn vbroadcasti32x2_maskz(&mut self, op0: Ymm, op1: Xmm) {
2125        self.emit(
2126            VBROADCASTI32X2_256RR_MASKZ,
2127            op0.as_operand(),
2128            op1.as_operand(),
2129            &NOREG,
2130            &NOREG,
2131        );
2132    }
2133}
2134
2135impl<'a> Vbroadcasti32x2MaskzEmitter<Ymm, Mem> for Assembler<'a> {
2136    fn vbroadcasti32x2_maskz(&mut self, op0: Ymm, op1: Mem) {
2137        self.emit(
2138            VBROADCASTI32X2_256RM_MASKZ,
2139            op0.as_operand(),
2140            op1.as_operand(),
2141            &NOREG,
2142            &NOREG,
2143        );
2144    }
2145}
2146
2147impl<'a> Vbroadcasti32x2MaskzEmitter<Zmm, Xmm> for Assembler<'a> {
2148    fn vbroadcasti32x2_maskz(&mut self, op0: Zmm, op1: Xmm) {
2149        self.emit(
2150            VBROADCASTI32X2_512RR_MASKZ,
2151            op0.as_operand(),
2152            op1.as_operand(),
2153            &NOREG,
2154            &NOREG,
2155        );
2156    }
2157}
2158
2159impl<'a> Vbroadcasti32x2MaskzEmitter<Zmm, Mem> for Assembler<'a> {
2160    fn vbroadcasti32x2_maskz(&mut self, op0: Zmm, op1: Mem) {
2161        self.emit(
2162            VBROADCASTI32X2_512RM_MASKZ,
2163            op0.as_operand(),
2164            op1.as_operand(),
2165            &NOREG,
2166            &NOREG,
2167        );
2168    }
2169}
2170
2171/// `VBROADCASTI32X4`.
2172///
2173/// Supported operand variants:
2174///
2175/// ```text
2176/// +---+----------+
2177/// | # | Operands |
2178/// +---+----------+
2179/// | 1 | Ymm, Mem |
2180/// | 2 | Zmm, Mem |
2181/// +---+----------+
2182/// ```
2183pub trait Vbroadcasti32x4Emitter<A, B> {
2184    fn vbroadcasti32x4(&mut self, op0: A, op1: B);
2185}
2186
2187impl<'a> Vbroadcasti32x4Emitter<Ymm, Mem> for Assembler<'a> {
2188    fn vbroadcasti32x4(&mut self, op0: Ymm, op1: Mem) {
2189        self.emit(
2190            VBROADCASTI32X4_256RM,
2191            op0.as_operand(),
2192            op1.as_operand(),
2193            &NOREG,
2194            &NOREG,
2195        );
2196    }
2197}
2198
2199impl<'a> Vbroadcasti32x4Emitter<Zmm, Mem> for Assembler<'a> {
2200    fn vbroadcasti32x4(&mut self, op0: Zmm, op1: Mem) {
2201        self.emit(
2202            VBROADCASTI32X4_512RM,
2203            op0.as_operand(),
2204            op1.as_operand(),
2205            &NOREG,
2206            &NOREG,
2207        );
2208    }
2209}
2210
2211/// `VBROADCASTI32X4_MASK`.
2212///
2213/// Supported operand variants:
2214///
2215/// ```text
2216/// +---+----------+
2217/// | # | Operands |
2218/// +---+----------+
2219/// | 1 | Ymm, Mem |
2220/// | 2 | Zmm, Mem |
2221/// +---+----------+
2222/// ```
2223pub trait Vbroadcasti32x4MaskEmitter<A, B> {
2224    fn vbroadcasti32x4_mask(&mut self, op0: A, op1: B);
2225}
2226
2227impl<'a> Vbroadcasti32x4MaskEmitter<Ymm, Mem> for Assembler<'a> {
2228    fn vbroadcasti32x4_mask(&mut self, op0: Ymm, op1: Mem) {
2229        self.emit(
2230            VBROADCASTI32X4_256RM_MASK,
2231            op0.as_operand(),
2232            op1.as_operand(),
2233            &NOREG,
2234            &NOREG,
2235        );
2236    }
2237}
2238
2239impl<'a> Vbroadcasti32x4MaskEmitter<Zmm, Mem> for Assembler<'a> {
2240    fn vbroadcasti32x4_mask(&mut self, op0: Zmm, op1: Mem) {
2241        self.emit(
2242            VBROADCASTI32X4_512RM_MASK,
2243            op0.as_operand(),
2244            op1.as_operand(),
2245            &NOREG,
2246            &NOREG,
2247        );
2248    }
2249}
2250
2251/// `VBROADCASTI32X4_MASKZ`.
2252///
2253/// Supported operand variants:
2254///
2255/// ```text
2256/// +---+----------+
2257/// | # | Operands |
2258/// +---+----------+
2259/// | 1 | Ymm, Mem |
2260/// | 2 | Zmm, Mem |
2261/// +---+----------+
2262/// ```
2263pub trait Vbroadcasti32x4MaskzEmitter<A, B> {
2264    fn vbroadcasti32x4_maskz(&mut self, op0: A, op1: B);
2265}
2266
2267impl<'a> Vbroadcasti32x4MaskzEmitter<Ymm, Mem> for Assembler<'a> {
2268    fn vbroadcasti32x4_maskz(&mut self, op0: Ymm, op1: Mem) {
2269        self.emit(
2270            VBROADCASTI32X4_256RM_MASKZ,
2271            op0.as_operand(),
2272            op1.as_operand(),
2273            &NOREG,
2274            &NOREG,
2275        );
2276    }
2277}
2278
2279impl<'a> Vbroadcasti32x4MaskzEmitter<Zmm, Mem> for Assembler<'a> {
2280    fn vbroadcasti32x4_maskz(&mut self, op0: Zmm, op1: Mem) {
2281        self.emit(
2282            VBROADCASTI32X4_512RM_MASKZ,
2283            op0.as_operand(),
2284            op1.as_operand(),
2285            &NOREG,
2286            &NOREG,
2287        );
2288    }
2289}
2290
2291/// `VBROADCASTI32X8`.
2292///
2293/// Supported operand variants:
2294///
2295/// ```text
2296/// +---+----------+
2297/// | # | Operands |
2298/// +---+----------+
2299/// | 1 | Zmm, Mem |
2300/// +---+----------+
2301/// ```
2302pub trait Vbroadcasti32x8Emitter<A, B> {
2303    fn vbroadcasti32x8(&mut self, op0: A, op1: B);
2304}
2305
2306impl<'a> Vbroadcasti32x8Emitter<Zmm, Mem> for Assembler<'a> {
2307    fn vbroadcasti32x8(&mut self, op0: Zmm, op1: Mem) {
2308        self.emit(
2309            VBROADCASTI32X8_512RM,
2310            op0.as_operand(),
2311            op1.as_operand(),
2312            &NOREG,
2313            &NOREG,
2314        );
2315    }
2316}
2317
2318/// `VBROADCASTI32X8_MASK`.
2319///
2320/// Supported operand variants:
2321///
2322/// ```text
2323/// +---+----------+
2324/// | # | Operands |
2325/// +---+----------+
2326/// | 1 | Zmm, Mem |
2327/// +---+----------+
2328/// ```
2329pub trait Vbroadcasti32x8MaskEmitter<A, B> {
2330    fn vbroadcasti32x8_mask(&mut self, op0: A, op1: B);
2331}
2332
2333impl<'a> Vbroadcasti32x8MaskEmitter<Zmm, Mem> for Assembler<'a> {
2334    fn vbroadcasti32x8_mask(&mut self, op0: Zmm, op1: Mem) {
2335        self.emit(
2336            VBROADCASTI32X8_512RM_MASK,
2337            op0.as_operand(),
2338            op1.as_operand(),
2339            &NOREG,
2340            &NOREG,
2341        );
2342    }
2343}
2344
2345/// `VBROADCASTI32X8_MASKZ`.
2346///
2347/// Supported operand variants:
2348///
2349/// ```text
2350/// +---+----------+
2351/// | # | Operands |
2352/// +---+----------+
2353/// | 1 | Zmm, Mem |
2354/// +---+----------+
2355/// ```
2356pub trait Vbroadcasti32x8MaskzEmitter<A, B> {
2357    fn vbroadcasti32x8_maskz(&mut self, op0: A, op1: B);
2358}
2359
2360impl<'a> Vbroadcasti32x8MaskzEmitter<Zmm, Mem> for Assembler<'a> {
2361    fn vbroadcasti32x8_maskz(&mut self, op0: Zmm, op1: Mem) {
2362        self.emit(
2363            VBROADCASTI32X8_512RM_MASKZ,
2364            op0.as_operand(),
2365            op1.as_operand(),
2366            &NOREG,
2367            &NOREG,
2368        );
2369    }
2370}
2371
2372/// `VBROADCASTI64X2`.
2373///
2374/// Supported operand variants:
2375///
2376/// ```text
2377/// +---+----------+
2378/// | # | Operands |
2379/// +---+----------+
2380/// | 1 | Ymm, Mem |
2381/// | 2 | Zmm, Mem |
2382/// +---+----------+
2383/// ```
2384pub trait Vbroadcasti64x2Emitter<A, B> {
2385    fn vbroadcasti64x2(&mut self, op0: A, op1: B);
2386}
2387
2388impl<'a> Vbroadcasti64x2Emitter<Ymm, Mem> for Assembler<'a> {
2389    fn vbroadcasti64x2(&mut self, op0: Ymm, op1: Mem) {
2390        self.emit(
2391            VBROADCASTI64X2_256RM,
2392            op0.as_operand(),
2393            op1.as_operand(),
2394            &NOREG,
2395            &NOREG,
2396        );
2397    }
2398}
2399
2400impl<'a> Vbroadcasti64x2Emitter<Zmm, Mem> for Assembler<'a> {
2401    fn vbroadcasti64x2(&mut self, op0: Zmm, op1: Mem) {
2402        self.emit(
2403            VBROADCASTI64X2_512RM,
2404            op0.as_operand(),
2405            op1.as_operand(),
2406            &NOREG,
2407            &NOREG,
2408        );
2409    }
2410}
2411
2412/// `VBROADCASTI64X2_MASK`.
2413///
2414/// Supported operand variants:
2415///
2416/// ```text
2417/// +---+----------+
2418/// | # | Operands |
2419/// +---+----------+
2420/// | 1 | Ymm, Mem |
2421/// | 2 | Zmm, Mem |
2422/// +---+----------+
2423/// ```
2424pub trait Vbroadcasti64x2MaskEmitter<A, B> {
2425    fn vbroadcasti64x2_mask(&mut self, op0: A, op1: B);
2426}
2427
2428impl<'a> Vbroadcasti64x2MaskEmitter<Ymm, Mem> for Assembler<'a> {
2429    fn vbroadcasti64x2_mask(&mut self, op0: Ymm, op1: Mem) {
2430        self.emit(
2431            VBROADCASTI64X2_256RM_MASK,
2432            op0.as_operand(),
2433            op1.as_operand(),
2434            &NOREG,
2435            &NOREG,
2436        );
2437    }
2438}
2439
2440impl<'a> Vbroadcasti64x2MaskEmitter<Zmm, Mem> for Assembler<'a> {
2441    fn vbroadcasti64x2_mask(&mut self, op0: Zmm, op1: Mem) {
2442        self.emit(
2443            VBROADCASTI64X2_512RM_MASK,
2444            op0.as_operand(),
2445            op1.as_operand(),
2446            &NOREG,
2447            &NOREG,
2448        );
2449    }
2450}
2451
2452/// `VBROADCASTI64X2_MASKZ`.
2453///
2454/// Supported operand variants:
2455///
2456/// ```text
2457/// +---+----------+
2458/// | # | Operands |
2459/// +---+----------+
2460/// | 1 | Ymm, Mem |
2461/// | 2 | Zmm, Mem |
2462/// +---+----------+
2463/// ```
2464pub trait Vbroadcasti64x2MaskzEmitter<A, B> {
2465    fn vbroadcasti64x2_maskz(&mut self, op0: A, op1: B);
2466}
2467
2468impl<'a> Vbroadcasti64x2MaskzEmitter<Ymm, Mem> for Assembler<'a> {
2469    fn vbroadcasti64x2_maskz(&mut self, op0: Ymm, op1: Mem) {
2470        self.emit(
2471            VBROADCASTI64X2_256RM_MASKZ,
2472            op0.as_operand(),
2473            op1.as_operand(),
2474            &NOREG,
2475            &NOREG,
2476        );
2477    }
2478}
2479
2480impl<'a> Vbroadcasti64x2MaskzEmitter<Zmm, Mem> for Assembler<'a> {
2481    fn vbroadcasti64x2_maskz(&mut self, op0: Zmm, op1: Mem) {
2482        self.emit(
2483            VBROADCASTI64X2_512RM_MASKZ,
2484            op0.as_operand(),
2485            op1.as_operand(),
2486            &NOREG,
2487            &NOREG,
2488        );
2489    }
2490}
2491
2492/// `VCVTPD2QQ`.
2493///
2494/// Supported operand variants:
2495///
2496/// ```text
2497/// +---+----------+
2498/// | # | Operands |
2499/// +---+----------+
2500/// | 1 | Xmm, Mem |
2501/// | 2 | Xmm, Xmm |
2502/// | 3 | Ymm, Mem |
2503/// | 4 | Ymm, Ymm |
2504/// | 5 | Zmm, Mem |
2505/// | 6 | Zmm, Zmm |
2506/// +---+----------+
2507/// ```
2508pub trait Vcvtpd2qqEmitter<A, B> {
2509    fn vcvtpd2qq(&mut self, op0: A, op1: B);
2510}
2511
2512impl<'a> Vcvtpd2qqEmitter<Xmm, Xmm> for Assembler<'a> {
2513    fn vcvtpd2qq(&mut self, op0: Xmm, op1: Xmm) {
2514        self.emit(
2515            VCVTPD2QQ128RR,
2516            op0.as_operand(),
2517            op1.as_operand(),
2518            &NOREG,
2519            &NOREG,
2520        );
2521    }
2522}
2523
2524impl<'a> Vcvtpd2qqEmitter<Xmm, Mem> for Assembler<'a> {
2525    fn vcvtpd2qq(&mut self, op0: Xmm, op1: Mem) {
2526        self.emit(
2527            VCVTPD2QQ128RM,
2528            op0.as_operand(),
2529            op1.as_operand(),
2530            &NOREG,
2531            &NOREG,
2532        );
2533    }
2534}
2535
2536impl<'a> Vcvtpd2qqEmitter<Ymm, Ymm> for Assembler<'a> {
2537    fn vcvtpd2qq(&mut self, op0: Ymm, op1: Ymm) {
2538        self.emit(
2539            VCVTPD2QQ256RR,
2540            op0.as_operand(),
2541            op1.as_operand(),
2542            &NOREG,
2543            &NOREG,
2544        );
2545    }
2546}
2547
2548impl<'a> Vcvtpd2qqEmitter<Ymm, Mem> for Assembler<'a> {
2549    fn vcvtpd2qq(&mut self, op0: Ymm, op1: Mem) {
2550        self.emit(
2551            VCVTPD2QQ256RM,
2552            op0.as_operand(),
2553            op1.as_operand(),
2554            &NOREG,
2555            &NOREG,
2556        );
2557    }
2558}
2559
2560impl<'a> Vcvtpd2qqEmitter<Zmm, Zmm> for Assembler<'a> {
2561    fn vcvtpd2qq(&mut self, op0: Zmm, op1: Zmm) {
2562        self.emit(
2563            VCVTPD2QQ512RR,
2564            op0.as_operand(),
2565            op1.as_operand(),
2566            &NOREG,
2567            &NOREG,
2568        );
2569    }
2570}
2571
2572impl<'a> Vcvtpd2qqEmitter<Zmm, Mem> for Assembler<'a> {
2573    fn vcvtpd2qq(&mut self, op0: Zmm, op1: Mem) {
2574        self.emit(
2575            VCVTPD2QQ512RM,
2576            op0.as_operand(),
2577            op1.as_operand(),
2578            &NOREG,
2579            &NOREG,
2580        );
2581    }
2582}
2583
2584/// `VCVTPD2QQ_ER`.
2585///
2586/// Supported operand variants:
2587///
2588/// ```text
2589/// +---+----------+
2590/// | # | Operands |
2591/// +---+----------+
2592/// | 1 | Zmm, Zmm |
2593/// +---+----------+
2594/// ```
2595pub trait Vcvtpd2qqErEmitter<A, B> {
2596    fn vcvtpd2qq_er(&mut self, op0: A, op1: B);
2597}
2598
2599impl<'a> Vcvtpd2qqErEmitter<Zmm, Zmm> for Assembler<'a> {
2600    fn vcvtpd2qq_er(&mut self, op0: Zmm, op1: Zmm) {
2601        self.emit(
2602            VCVTPD2QQ512RR_ER,
2603            op0.as_operand(),
2604            op1.as_operand(),
2605            &NOREG,
2606            &NOREG,
2607        );
2608    }
2609}
2610
2611/// `VCVTPD2QQ_MASK`.
2612///
2613/// Supported operand variants:
2614///
2615/// ```text
2616/// +---+----------+
2617/// | # | Operands |
2618/// +---+----------+
2619/// | 1 | Xmm, Mem |
2620/// | 2 | Xmm, Xmm |
2621/// | 3 | Ymm, Mem |
2622/// | 4 | Ymm, Ymm |
2623/// | 5 | Zmm, Mem |
2624/// | 6 | Zmm, Zmm |
2625/// +---+----------+
2626/// ```
2627pub trait Vcvtpd2qqMaskEmitter<A, B> {
2628    fn vcvtpd2qq_mask(&mut self, op0: A, op1: B);
2629}
2630
2631impl<'a> Vcvtpd2qqMaskEmitter<Xmm, Xmm> for Assembler<'a> {
2632    fn vcvtpd2qq_mask(&mut self, op0: Xmm, op1: Xmm) {
2633        self.emit(
2634            VCVTPD2QQ128RR_MASK,
2635            op0.as_operand(),
2636            op1.as_operand(),
2637            &NOREG,
2638            &NOREG,
2639        );
2640    }
2641}
2642
2643impl<'a> Vcvtpd2qqMaskEmitter<Xmm, Mem> for Assembler<'a> {
2644    fn vcvtpd2qq_mask(&mut self, op0: Xmm, op1: Mem) {
2645        self.emit(
2646            VCVTPD2QQ128RM_MASK,
2647            op0.as_operand(),
2648            op1.as_operand(),
2649            &NOREG,
2650            &NOREG,
2651        );
2652    }
2653}
2654
2655impl<'a> Vcvtpd2qqMaskEmitter<Ymm, Ymm> for Assembler<'a> {
2656    fn vcvtpd2qq_mask(&mut self, op0: Ymm, op1: Ymm) {
2657        self.emit(
2658            VCVTPD2QQ256RR_MASK,
2659            op0.as_operand(),
2660            op1.as_operand(),
2661            &NOREG,
2662            &NOREG,
2663        );
2664    }
2665}
2666
2667impl<'a> Vcvtpd2qqMaskEmitter<Ymm, Mem> for Assembler<'a> {
2668    fn vcvtpd2qq_mask(&mut self, op0: Ymm, op1: Mem) {
2669        self.emit(
2670            VCVTPD2QQ256RM_MASK,
2671            op0.as_operand(),
2672            op1.as_operand(),
2673            &NOREG,
2674            &NOREG,
2675        );
2676    }
2677}
2678
2679impl<'a> Vcvtpd2qqMaskEmitter<Zmm, Zmm> for Assembler<'a> {
2680    fn vcvtpd2qq_mask(&mut self, op0: Zmm, op1: Zmm) {
2681        self.emit(
2682            VCVTPD2QQ512RR_MASK,
2683            op0.as_operand(),
2684            op1.as_operand(),
2685            &NOREG,
2686            &NOREG,
2687        );
2688    }
2689}
2690
2691impl<'a> Vcvtpd2qqMaskEmitter<Zmm, Mem> for Assembler<'a> {
2692    fn vcvtpd2qq_mask(&mut self, op0: Zmm, op1: Mem) {
2693        self.emit(
2694            VCVTPD2QQ512RM_MASK,
2695            op0.as_operand(),
2696            op1.as_operand(),
2697            &NOREG,
2698            &NOREG,
2699        );
2700    }
2701}
2702
2703/// `VCVTPD2QQ_MASK_ER`.
2704///
2705/// Supported operand variants:
2706///
2707/// ```text
2708/// +---+----------+
2709/// | # | Operands |
2710/// +---+----------+
2711/// | 1 | Zmm, Zmm |
2712/// +---+----------+
2713/// ```
2714pub trait Vcvtpd2qqMaskErEmitter<A, B> {
2715    fn vcvtpd2qq_mask_er(&mut self, op0: A, op1: B);
2716}
2717
2718impl<'a> Vcvtpd2qqMaskErEmitter<Zmm, Zmm> for Assembler<'a> {
2719    fn vcvtpd2qq_mask_er(&mut self, op0: Zmm, op1: Zmm) {
2720        self.emit(
2721            VCVTPD2QQ512RR_MASK_ER,
2722            op0.as_operand(),
2723            op1.as_operand(),
2724            &NOREG,
2725            &NOREG,
2726        );
2727    }
2728}
2729
2730/// `VCVTPD2QQ_MASKZ`.
2731///
2732/// Supported operand variants:
2733///
2734/// ```text
2735/// +---+----------+
2736/// | # | Operands |
2737/// +---+----------+
2738/// | 1 | Xmm, Mem |
2739/// | 2 | Xmm, Xmm |
2740/// | 3 | Ymm, Mem |
2741/// | 4 | Ymm, Ymm |
2742/// | 5 | Zmm, Mem |
2743/// | 6 | Zmm, Zmm |
2744/// +---+----------+
2745/// ```
2746pub trait Vcvtpd2qqMaskzEmitter<A, B> {
2747    fn vcvtpd2qq_maskz(&mut self, op0: A, op1: B);
2748}
2749
2750impl<'a> Vcvtpd2qqMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
2751    fn vcvtpd2qq_maskz(&mut self, op0: Xmm, op1: Xmm) {
2752        self.emit(
2753            VCVTPD2QQ128RR_MASKZ,
2754            op0.as_operand(),
2755            op1.as_operand(),
2756            &NOREG,
2757            &NOREG,
2758        );
2759    }
2760}
2761
2762impl<'a> Vcvtpd2qqMaskzEmitter<Xmm, Mem> for Assembler<'a> {
2763    fn vcvtpd2qq_maskz(&mut self, op0: Xmm, op1: Mem) {
2764        self.emit(
2765            VCVTPD2QQ128RM_MASKZ,
2766            op0.as_operand(),
2767            op1.as_operand(),
2768            &NOREG,
2769            &NOREG,
2770        );
2771    }
2772}
2773
2774impl<'a> Vcvtpd2qqMaskzEmitter<Ymm, Ymm> for Assembler<'a> {
2775    fn vcvtpd2qq_maskz(&mut self, op0: Ymm, op1: Ymm) {
2776        self.emit(
2777            VCVTPD2QQ256RR_MASKZ,
2778            op0.as_operand(),
2779            op1.as_operand(),
2780            &NOREG,
2781            &NOREG,
2782        );
2783    }
2784}
2785
2786impl<'a> Vcvtpd2qqMaskzEmitter<Ymm, Mem> for Assembler<'a> {
2787    fn vcvtpd2qq_maskz(&mut self, op0: Ymm, op1: Mem) {
2788        self.emit(
2789            VCVTPD2QQ256RM_MASKZ,
2790            op0.as_operand(),
2791            op1.as_operand(),
2792            &NOREG,
2793            &NOREG,
2794        );
2795    }
2796}
2797
2798impl<'a> Vcvtpd2qqMaskzEmitter<Zmm, Zmm> for Assembler<'a> {
2799    fn vcvtpd2qq_maskz(&mut self, op0: Zmm, op1: Zmm) {
2800        self.emit(
2801            VCVTPD2QQ512RR_MASKZ,
2802            op0.as_operand(),
2803            op1.as_operand(),
2804            &NOREG,
2805            &NOREG,
2806        );
2807    }
2808}
2809
2810impl<'a> Vcvtpd2qqMaskzEmitter<Zmm, Mem> for Assembler<'a> {
2811    fn vcvtpd2qq_maskz(&mut self, op0: Zmm, op1: Mem) {
2812        self.emit(
2813            VCVTPD2QQ512RM_MASKZ,
2814            op0.as_operand(),
2815            op1.as_operand(),
2816            &NOREG,
2817            &NOREG,
2818        );
2819    }
2820}
2821
2822/// `VCVTPD2QQ_MASKZ_ER`.
2823///
2824/// Supported operand variants:
2825///
2826/// ```text
2827/// +---+----------+
2828/// | # | Operands |
2829/// +---+----------+
2830/// | 1 | Zmm, Zmm |
2831/// +---+----------+
2832/// ```
2833pub trait Vcvtpd2qqMaskzErEmitter<A, B> {
2834    fn vcvtpd2qq_maskz_er(&mut self, op0: A, op1: B);
2835}
2836
2837impl<'a> Vcvtpd2qqMaskzErEmitter<Zmm, Zmm> for Assembler<'a> {
2838    fn vcvtpd2qq_maskz_er(&mut self, op0: Zmm, op1: Zmm) {
2839        self.emit(
2840            VCVTPD2QQ512RR_MASKZ_ER,
2841            op0.as_operand(),
2842            op1.as_operand(),
2843            &NOREG,
2844            &NOREG,
2845        );
2846    }
2847}
2848
2849/// `VCVTPS2QQ`.
2850///
2851/// Supported operand variants:
2852///
2853/// ```text
2854/// +---+----------+
2855/// | # | Operands |
2856/// +---+----------+
2857/// | 1 | Xmm, Mem |
2858/// | 2 | Xmm, Xmm |
2859/// | 3 | Ymm, Mem |
2860/// | 4 | Ymm, Xmm |
2861/// | 5 | Zmm, Mem |
2862/// | 6 | Zmm, Ymm |
2863/// +---+----------+
2864/// ```
2865pub trait Vcvtps2qqEmitter<A, B> {
2866    fn vcvtps2qq(&mut self, op0: A, op1: B);
2867}
2868
2869impl<'a> Vcvtps2qqEmitter<Xmm, Xmm> for Assembler<'a> {
2870    fn vcvtps2qq(&mut self, op0: Xmm, op1: Xmm) {
2871        self.emit(
2872            VCVTPS2QQ128RR,
2873            op0.as_operand(),
2874            op1.as_operand(),
2875            &NOREG,
2876            &NOREG,
2877        );
2878    }
2879}
2880
2881impl<'a> Vcvtps2qqEmitter<Xmm, Mem> for Assembler<'a> {
2882    fn vcvtps2qq(&mut self, op0: Xmm, op1: Mem) {
2883        self.emit(
2884            VCVTPS2QQ128RM,
2885            op0.as_operand(),
2886            op1.as_operand(),
2887            &NOREG,
2888            &NOREG,
2889        );
2890    }
2891}
2892
2893impl<'a> Vcvtps2qqEmitter<Ymm, Xmm> for Assembler<'a> {
2894    fn vcvtps2qq(&mut self, op0: Ymm, op1: Xmm) {
2895        self.emit(
2896            VCVTPS2QQ256RR,
2897            op0.as_operand(),
2898            op1.as_operand(),
2899            &NOREG,
2900            &NOREG,
2901        );
2902    }
2903}
2904
2905impl<'a> Vcvtps2qqEmitter<Ymm, Mem> for Assembler<'a> {
2906    fn vcvtps2qq(&mut self, op0: Ymm, op1: Mem) {
2907        self.emit(
2908            VCVTPS2QQ256RM,
2909            op0.as_operand(),
2910            op1.as_operand(),
2911            &NOREG,
2912            &NOREG,
2913        );
2914    }
2915}
2916
2917impl<'a> Vcvtps2qqEmitter<Zmm, Ymm> for Assembler<'a> {
2918    fn vcvtps2qq(&mut self, op0: Zmm, op1: Ymm) {
2919        self.emit(
2920            VCVTPS2QQ512RR,
2921            op0.as_operand(),
2922            op1.as_operand(),
2923            &NOREG,
2924            &NOREG,
2925        );
2926    }
2927}
2928
2929impl<'a> Vcvtps2qqEmitter<Zmm, Mem> for Assembler<'a> {
2930    fn vcvtps2qq(&mut self, op0: Zmm, op1: Mem) {
2931        self.emit(
2932            VCVTPS2QQ512RM,
2933            op0.as_operand(),
2934            op1.as_operand(),
2935            &NOREG,
2936            &NOREG,
2937        );
2938    }
2939}
2940
2941/// `VCVTPS2QQ_ER`.
2942///
2943/// Supported operand variants:
2944///
2945/// ```text
2946/// +---+----------+
2947/// | # | Operands |
2948/// +---+----------+
2949/// | 1 | Zmm, Ymm |
2950/// +---+----------+
2951/// ```
2952pub trait Vcvtps2qqErEmitter<A, B> {
2953    fn vcvtps2qq_er(&mut self, op0: A, op1: B);
2954}
2955
2956impl<'a> Vcvtps2qqErEmitter<Zmm, Ymm> for Assembler<'a> {
2957    fn vcvtps2qq_er(&mut self, op0: Zmm, op1: Ymm) {
2958        self.emit(
2959            VCVTPS2QQ512RR_ER,
2960            op0.as_operand(),
2961            op1.as_operand(),
2962            &NOREG,
2963            &NOREG,
2964        );
2965    }
2966}
2967
2968/// `VCVTPS2QQ_MASK`.
2969///
2970/// Supported operand variants:
2971///
2972/// ```text
2973/// +---+----------+
2974/// | # | Operands |
2975/// +---+----------+
2976/// | 1 | Xmm, Mem |
2977/// | 2 | Xmm, Xmm |
2978/// | 3 | Ymm, Mem |
2979/// | 4 | Ymm, Xmm |
2980/// | 5 | Zmm, Mem |
2981/// | 6 | Zmm, Ymm |
2982/// +---+----------+
2983/// ```
2984pub trait Vcvtps2qqMaskEmitter<A, B> {
2985    fn vcvtps2qq_mask(&mut self, op0: A, op1: B);
2986}
2987
2988impl<'a> Vcvtps2qqMaskEmitter<Xmm, Xmm> for Assembler<'a> {
2989    fn vcvtps2qq_mask(&mut self, op0: Xmm, op1: Xmm) {
2990        self.emit(
2991            VCVTPS2QQ128RR_MASK,
2992            op0.as_operand(),
2993            op1.as_operand(),
2994            &NOREG,
2995            &NOREG,
2996        );
2997    }
2998}
2999
3000impl<'a> Vcvtps2qqMaskEmitter<Xmm, Mem> for Assembler<'a> {
3001    fn vcvtps2qq_mask(&mut self, op0: Xmm, op1: Mem) {
3002        self.emit(
3003            VCVTPS2QQ128RM_MASK,
3004            op0.as_operand(),
3005            op1.as_operand(),
3006            &NOREG,
3007            &NOREG,
3008        );
3009    }
3010}
3011
3012impl<'a> Vcvtps2qqMaskEmitter<Ymm, Xmm> for Assembler<'a> {
3013    fn vcvtps2qq_mask(&mut self, op0: Ymm, op1: Xmm) {
3014        self.emit(
3015            VCVTPS2QQ256RR_MASK,
3016            op0.as_operand(),
3017            op1.as_operand(),
3018            &NOREG,
3019            &NOREG,
3020        );
3021    }
3022}
3023
3024impl<'a> Vcvtps2qqMaskEmitter<Ymm, Mem> for Assembler<'a> {
3025    fn vcvtps2qq_mask(&mut self, op0: Ymm, op1: Mem) {
3026        self.emit(
3027            VCVTPS2QQ256RM_MASK,
3028            op0.as_operand(),
3029            op1.as_operand(),
3030            &NOREG,
3031            &NOREG,
3032        );
3033    }
3034}
3035
3036impl<'a> Vcvtps2qqMaskEmitter<Zmm, Ymm> for Assembler<'a> {
3037    fn vcvtps2qq_mask(&mut self, op0: Zmm, op1: Ymm) {
3038        self.emit(
3039            VCVTPS2QQ512RR_MASK,
3040            op0.as_operand(),
3041            op1.as_operand(),
3042            &NOREG,
3043            &NOREG,
3044        );
3045    }
3046}
3047
3048impl<'a> Vcvtps2qqMaskEmitter<Zmm, Mem> for Assembler<'a> {
3049    fn vcvtps2qq_mask(&mut self, op0: Zmm, op1: Mem) {
3050        self.emit(
3051            VCVTPS2QQ512RM_MASK,
3052            op0.as_operand(),
3053            op1.as_operand(),
3054            &NOREG,
3055            &NOREG,
3056        );
3057    }
3058}
3059
3060/// `VCVTPS2QQ_MASK_ER`.
3061///
3062/// Supported operand variants:
3063///
3064/// ```text
3065/// +---+----------+
3066/// | # | Operands |
3067/// +---+----------+
3068/// | 1 | Zmm, Ymm |
3069/// +---+----------+
3070/// ```
3071pub trait Vcvtps2qqMaskErEmitter<A, B> {
3072    fn vcvtps2qq_mask_er(&mut self, op0: A, op1: B);
3073}
3074
3075impl<'a> Vcvtps2qqMaskErEmitter<Zmm, Ymm> for Assembler<'a> {
3076    fn vcvtps2qq_mask_er(&mut self, op0: Zmm, op1: Ymm) {
3077        self.emit(
3078            VCVTPS2QQ512RR_MASK_ER,
3079            op0.as_operand(),
3080            op1.as_operand(),
3081            &NOREG,
3082            &NOREG,
3083        );
3084    }
3085}
3086
3087/// `VCVTPS2QQ_MASKZ`.
3088///
3089/// Supported operand variants:
3090///
3091/// ```text
3092/// +---+----------+
3093/// | # | Operands |
3094/// +---+----------+
3095/// | 1 | Xmm, Mem |
3096/// | 2 | Xmm, Xmm |
3097/// | 3 | Ymm, Mem |
3098/// | 4 | Ymm, Xmm |
3099/// | 5 | Zmm, Mem |
3100/// | 6 | Zmm, Ymm |
3101/// +---+----------+
3102/// ```
3103pub trait Vcvtps2qqMaskzEmitter<A, B> {
3104    fn vcvtps2qq_maskz(&mut self, op0: A, op1: B);
3105}
3106
3107impl<'a> Vcvtps2qqMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
3108    fn vcvtps2qq_maskz(&mut self, op0: Xmm, op1: Xmm) {
3109        self.emit(
3110            VCVTPS2QQ128RR_MASKZ,
3111            op0.as_operand(),
3112            op1.as_operand(),
3113            &NOREG,
3114            &NOREG,
3115        );
3116    }
3117}
3118
3119impl<'a> Vcvtps2qqMaskzEmitter<Xmm, Mem> for Assembler<'a> {
3120    fn vcvtps2qq_maskz(&mut self, op0: Xmm, op1: Mem) {
3121        self.emit(
3122            VCVTPS2QQ128RM_MASKZ,
3123            op0.as_operand(),
3124            op1.as_operand(),
3125            &NOREG,
3126            &NOREG,
3127        );
3128    }
3129}
3130
3131impl<'a> Vcvtps2qqMaskzEmitter<Ymm, Xmm> for Assembler<'a> {
3132    fn vcvtps2qq_maskz(&mut self, op0: Ymm, op1: Xmm) {
3133        self.emit(
3134            VCVTPS2QQ256RR_MASKZ,
3135            op0.as_operand(),
3136            op1.as_operand(),
3137            &NOREG,
3138            &NOREG,
3139        );
3140    }
3141}
3142
3143impl<'a> Vcvtps2qqMaskzEmitter<Ymm, Mem> for Assembler<'a> {
3144    fn vcvtps2qq_maskz(&mut self, op0: Ymm, op1: Mem) {
3145        self.emit(
3146            VCVTPS2QQ256RM_MASKZ,
3147            op0.as_operand(),
3148            op1.as_operand(),
3149            &NOREG,
3150            &NOREG,
3151        );
3152    }
3153}
3154
3155impl<'a> Vcvtps2qqMaskzEmitter<Zmm, Ymm> for Assembler<'a> {
3156    fn vcvtps2qq_maskz(&mut self, op0: Zmm, op1: Ymm) {
3157        self.emit(
3158            VCVTPS2QQ512RR_MASKZ,
3159            op0.as_operand(),
3160            op1.as_operand(),
3161            &NOREG,
3162            &NOREG,
3163        );
3164    }
3165}
3166
3167impl<'a> Vcvtps2qqMaskzEmitter<Zmm, Mem> for Assembler<'a> {
3168    fn vcvtps2qq_maskz(&mut self, op0: Zmm, op1: Mem) {
3169        self.emit(
3170            VCVTPS2QQ512RM_MASKZ,
3171            op0.as_operand(),
3172            op1.as_operand(),
3173            &NOREG,
3174            &NOREG,
3175        );
3176    }
3177}
3178
3179/// `VCVTPS2QQ_MASKZ_ER`.
3180///
3181/// Supported operand variants:
3182///
3183/// ```text
3184/// +---+----------+
3185/// | # | Operands |
3186/// +---+----------+
3187/// | 1 | Zmm, Ymm |
3188/// +---+----------+
3189/// ```
3190pub trait Vcvtps2qqMaskzErEmitter<A, B> {
3191    fn vcvtps2qq_maskz_er(&mut self, op0: A, op1: B);
3192}
3193
3194impl<'a> Vcvtps2qqMaskzErEmitter<Zmm, Ymm> for Assembler<'a> {
3195    fn vcvtps2qq_maskz_er(&mut self, op0: Zmm, op1: Ymm) {
3196        self.emit(
3197            VCVTPS2QQ512RR_MASKZ_ER,
3198            op0.as_operand(),
3199            op1.as_operand(),
3200            &NOREG,
3201            &NOREG,
3202        );
3203    }
3204}
3205
3206/// `VCVTQQ2PD`.
3207///
3208/// Supported operand variants:
3209///
3210/// ```text
3211/// +---+----------+
3212/// | # | Operands |
3213/// +---+----------+
3214/// | 1 | Xmm, Mem |
3215/// | 2 | Xmm, Xmm |
3216/// | 3 | Ymm, Mem |
3217/// | 4 | Ymm, Ymm |
3218/// | 5 | Zmm, Mem |
3219/// | 6 | Zmm, Zmm |
3220/// +---+----------+
3221/// ```
3222pub trait Vcvtqq2pdEmitter<A, B> {
3223    fn vcvtqq2pd(&mut self, op0: A, op1: B);
3224}
3225
3226impl<'a> Vcvtqq2pdEmitter<Xmm, Xmm> for Assembler<'a> {
3227    fn vcvtqq2pd(&mut self, op0: Xmm, op1: Xmm) {
3228        self.emit(
3229            VCVTQQ2PD128RR,
3230            op0.as_operand(),
3231            op1.as_operand(),
3232            &NOREG,
3233            &NOREG,
3234        );
3235    }
3236}
3237
3238impl<'a> Vcvtqq2pdEmitter<Xmm, Mem> for Assembler<'a> {
3239    fn vcvtqq2pd(&mut self, op0: Xmm, op1: Mem) {
3240        self.emit(
3241            VCVTQQ2PD128RM,
3242            op0.as_operand(),
3243            op1.as_operand(),
3244            &NOREG,
3245            &NOREG,
3246        );
3247    }
3248}
3249
3250impl<'a> Vcvtqq2pdEmitter<Ymm, Ymm> for Assembler<'a> {
3251    fn vcvtqq2pd(&mut self, op0: Ymm, op1: Ymm) {
3252        self.emit(
3253            VCVTQQ2PD256RR,
3254            op0.as_operand(),
3255            op1.as_operand(),
3256            &NOREG,
3257            &NOREG,
3258        );
3259    }
3260}
3261
3262impl<'a> Vcvtqq2pdEmitter<Ymm, Mem> for Assembler<'a> {
3263    fn vcvtqq2pd(&mut self, op0: Ymm, op1: Mem) {
3264        self.emit(
3265            VCVTQQ2PD256RM,
3266            op0.as_operand(),
3267            op1.as_operand(),
3268            &NOREG,
3269            &NOREG,
3270        );
3271    }
3272}
3273
3274impl<'a> Vcvtqq2pdEmitter<Zmm, Zmm> for Assembler<'a> {
3275    fn vcvtqq2pd(&mut self, op0: Zmm, op1: Zmm) {
3276        self.emit(
3277            VCVTQQ2PD512RR,
3278            op0.as_operand(),
3279            op1.as_operand(),
3280            &NOREG,
3281            &NOREG,
3282        );
3283    }
3284}
3285
3286impl<'a> Vcvtqq2pdEmitter<Zmm, Mem> for Assembler<'a> {
3287    fn vcvtqq2pd(&mut self, op0: Zmm, op1: Mem) {
3288        self.emit(
3289            VCVTQQ2PD512RM,
3290            op0.as_operand(),
3291            op1.as_operand(),
3292            &NOREG,
3293            &NOREG,
3294        );
3295    }
3296}
3297
3298/// `VCVTQQ2PD_ER`.
3299///
3300/// Supported operand variants:
3301///
3302/// ```text
3303/// +---+----------+
3304/// | # | Operands |
3305/// +---+----------+
3306/// | 1 | Zmm, Zmm |
3307/// +---+----------+
3308/// ```
3309pub trait Vcvtqq2pdErEmitter<A, B> {
3310    fn vcvtqq2pd_er(&mut self, op0: A, op1: B);
3311}
3312
3313impl<'a> Vcvtqq2pdErEmitter<Zmm, Zmm> for Assembler<'a> {
3314    fn vcvtqq2pd_er(&mut self, op0: Zmm, op1: Zmm) {
3315        self.emit(
3316            VCVTQQ2PD512RR_ER,
3317            op0.as_operand(),
3318            op1.as_operand(),
3319            &NOREG,
3320            &NOREG,
3321        );
3322    }
3323}
3324
3325/// `VCVTQQ2PD_MASK`.
3326///
3327/// Supported operand variants:
3328///
3329/// ```text
3330/// +---+----------+
3331/// | # | Operands |
3332/// +---+----------+
3333/// | 1 | Xmm, Mem |
3334/// | 2 | Xmm, Xmm |
3335/// | 3 | Ymm, Mem |
3336/// | 4 | Ymm, Ymm |
3337/// | 5 | Zmm, Mem |
3338/// | 6 | Zmm, Zmm |
3339/// +---+----------+
3340/// ```
3341pub trait Vcvtqq2pdMaskEmitter<A, B> {
3342    fn vcvtqq2pd_mask(&mut self, op0: A, op1: B);
3343}
3344
3345impl<'a> Vcvtqq2pdMaskEmitter<Xmm, Xmm> for Assembler<'a> {
3346    fn vcvtqq2pd_mask(&mut self, op0: Xmm, op1: Xmm) {
3347        self.emit(
3348            VCVTQQ2PD128RR_MASK,
3349            op0.as_operand(),
3350            op1.as_operand(),
3351            &NOREG,
3352            &NOREG,
3353        );
3354    }
3355}
3356
3357impl<'a> Vcvtqq2pdMaskEmitter<Xmm, Mem> for Assembler<'a> {
3358    fn vcvtqq2pd_mask(&mut self, op0: Xmm, op1: Mem) {
3359        self.emit(
3360            VCVTQQ2PD128RM_MASK,
3361            op0.as_operand(),
3362            op1.as_operand(),
3363            &NOREG,
3364            &NOREG,
3365        );
3366    }
3367}
3368
3369impl<'a> Vcvtqq2pdMaskEmitter<Ymm, Ymm> for Assembler<'a> {
3370    fn vcvtqq2pd_mask(&mut self, op0: Ymm, op1: Ymm) {
3371        self.emit(
3372            VCVTQQ2PD256RR_MASK,
3373            op0.as_operand(),
3374            op1.as_operand(),
3375            &NOREG,
3376            &NOREG,
3377        );
3378    }
3379}
3380
3381impl<'a> Vcvtqq2pdMaskEmitter<Ymm, Mem> for Assembler<'a> {
3382    fn vcvtqq2pd_mask(&mut self, op0: Ymm, op1: Mem) {
3383        self.emit(
3384            VCVTQQ2PD256RM_MASK,
3385            op0.as_operand(),
3386            op1.as_operand(),
3387            &NOREG,
3388            &NOREG,
3389        );
3390    }
3391}
3392
3393impl<'a> Vcvtqq2pdMaskEmitter<Zmm, Zmm> for Assembler<'a> {
3394    fn vcvtqq2pd_mask(&mut self, op0: Zmm, op1: Zmm) {
3395        self.emit(
3396            VCVTQQ2PD512RR_MASK,
3397            op0.as_operand(),
3398            op1.as_operand(),
3399            &NOREG,
3400            &NOREG,
3401        );
3402    }
3403}
3404
3405impl<'a> Vcvtqq2pdMaskEmitter<Zmm, Mem> for Assembler<'a> {
3406    fn vcvtqq2pd_mask(&mut self, op0: Zmm, op1: Mem) {
3407        self.emit(
3408            VCVTQQ2PD512RM_MASK,
3409            op0.as_operand(),
3410            op1.as_operand(),
3411            &NOREG,
3412            &NOREG,
3413        );
3414    }
3415}
3416
3417/// `VCVTQQ2PD_MASK_ER`.
3418///
3419/// Supported operand variants:
3420///
3421/// ```text
3422/// +---+----------+
3423/// | # | Operands |
3424/// +---+----------+
3425/// | 1 | Zmm, Zmm |
3426/// +---+----------+
3427/// ```
3428pub trait Vcvtqq2pdMaskErEmitter<A, B> {
3429    fn vcvtqq2pd_mask_er(&mut self, op0: A, op1: B);
3430}
3431
3432impl<'a> Vcvtqq2pdMaskErEmitter<Zmm, Zmm> for Assembler<'a> {
3433    fn vcvtqq2pd_mask_er(&mut self, op0: Zmm, op1: Zmm) {
3434        self.emit(
3435            VCVTQQ2PD512RR_MASK_ER,
3436            op0.as_operand(),
3437            op1.as_operand(),
3438            &NOREG,
3439            &NOREG,
3440        );
3441    }
3442}
3443
3444/// `VCVTQQ2PD_MASKZ`.
3445///
3446/// Supported operand variants:
3447///
3448/// ```text
3449/// +---+----------+
3450/// | # | Operands |
3451/// +---+----------+
3452/// | 1 | Xmm, Mem |
3453/// | 2 | Xmm, Xmm |
3454/// | 3 | Ymm, Mem |
3455/// | 4 | Ymm, Ymm |
3456/// | 5 | Zmm, Mem |
3457/// | 6 | Zmm, Zmm |
3458/// +---+----------+
3459/// ```
3460pub trait Vcvtqq2pdMaskzEmitter<A, B> {
3461    fn vcvtqq2pd_maskz(&mut self, op0: A, op1: B);
3462}
3463
3464impl<'a> Vcvtqq2pdMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
3465    fn vcvtqq2pd_maskz(&mut self, op0: Xmm, op1: Xmm) {
3466        self.emit(
3467            VCVTQQ2PD128RR_MASKZ,
3468            op0.as_operand(),
3469            op1.as_operand(),
3470            &NOREG,
3471            &NOREG,
3472        );
3473    }
3474}
3475
3476impl<'a> Vcvtqq2pdMaskzEmitter<Xmm, Mem> for Assembler<'a> {
3477    fn vcvtqq2pd_maskz(&mut self, op0: Xmm, op1: Mem) {
3478        self.emit(
3479            VCVTQQ2PD128RM_MASKZ,
3480            op0.as_operand(),
3481            op1.as_operand(),
3482            &NOREG,
3483            &NOREG,
3484        );
3485    }
3486}
3487
3488impl<'a> Vcvtqq2pdMaskzEmitter<Ymm, Ymm> for Assembler<'a> {
3489    fn vcvtqq2pd_maskz(&mut self, op0: Ymm, op1: Ymm) {
3490        self.emit(
3491            VCVTQQ2PD256RR_MASKZ,
3492            op0.as_operand(),
3493            op1.as_operand(),
3494            &NOREG,
3495            &NOREG,
3496        );
3497    }
3498}
3499
3500impl<'a> Vcvtqq2pdMaskzEmitter<Ymm, Mem> for Assembler<'a> {
3501    fn vcvtqq2pd_maskz(&mut self, op0: Ymm, op1: Mem) {
3502        self.emit(
3503            VCVTQQ2PD256RM_MASKZ,
3504            op0.as_operand(),
3505            op1.as_operand(),
3506            &NOREG,
3507            &NOREG,
3508        );
3509    }
3510}
3511
3512impl<'a> Vcvtqq2pdMaskzEmitter<Zmm, Zmm> for Assembler<'a> {
3513    fn vcvtqq2pd_maskz(&mut self, op0: Zmm, op1: Zmm) {
3514        self.emit(
3515            VCVTQQ2PD512RR_MASKZ,
3516            op0.as_operand(),
3517            op1.as_operand(),
3518            &NOREG,
3519            &NOREG,
3520        );
3521    }
3522}
3523
3524impl<'a> Vcvtqq2pdMaskzEmitter<Zmm, Mem> for Assembler<'a> {
3525    fn vcvtqq2pd_maskz(&mut self, op0: Zmm, op1: Mem) {
3526        self.emit(
3527            VCVTQQ2PD512RM_MASKZ,
3528            op0.as_operand(),
3529            op1.as_operand(),
3530            &NOREG,
3531            &NOREG,
3532        );
3533    }
3534}
3535
3536/// `VCVTQQ2PD_MASKZ_ER`.
3537///
3538/// Supported operand variants:
3539///
3540/// ```text
3541/// +---+----------+
3542/// | # | Operands |
3543/// +---+----------+
3544/// | 1 | Zmm, Zmm |
3545/// +---+----------+
3546/// ```
3547pub trait Vcvtqq2pdMaskzErEmitter<A, B> {
3548    fn vcvtqq2pd_maskz_er(&mut self, op0: A, op1: B);
3549}
3550
3551impl<'a> Vcvtqq2pdMaskzErEmitter<Zmm, Zmm> for Assembler<'a> {
3552    fn vcvtqq2pd_maskz_er(&mut self, op0: Zmm, op1: Zmm) {
3553        self.emit(
3554            VCVTQQ2PD512RR_MASKZ_ER,
3555            op0.as_operand(),
3556            op1.as_operand(),
3557            &NOREG,
3558            &NOREG,
3559        );
3560    }
3561}
3562
3563/// `VCVTQQ2PS`.
3564///
3565/// Supported operand variants:
3566///
3567/// ```text
3568/// +---+----------+
3569/// | # | Operands |
3570/// +---+----------+
3571/// | 1 | Xmm, Mem |
3572/// | 2 | Xmm, Xmm |
3573/// | 3 | Xmm, Ymm |
3574/// | 4 | Ymm, Mem |
3575/// | 5 | Ymm, Zmm |
3576/// +---+----------+
3577/// ```
3578pub trait Vcvtqq2psEmitter<A, B> {
3579    fn vcvtqq2ps(&mut self, op0: A, op1: B);
3580}
3581
3582impl<'a> Vcvtqq2psEmitter<Xmm, Xmm> for Assembler<'a> {
3583    fn vcvtqq2ps(&mut self, op0: Xmm, op1: Xmm) {
3584        self.emit(
3585            VCVTQQ2PS128RR,
3586            op0.as_operand(),
3587            op1.as_operand(),
3588            &NOREG,
3589            &NOREG,
3590        );
3591    }
3592}
3593
3594impl<'a> Vcvtqq2psEmitter<Xmm, Mem> for Assembler<'a> {
3595    fn vcvtqq2ps(&mut self, op0: Xmm, op1: Mem) {
3596        self.emit(
3597            VCVTQQ2PS128RM,
3598            op0.as_operand(),
3599            op1.as_operand(),
3600            &NOREG,
3601            &NOREG,
3602        );
3603    }
3604}
3605
3606impl<'a> Vcvtqq2psEmitter<Xmm, Ymm> for Assembler<'a> {
3607    fn vcvtqq2ps(&mut self, op0: Xmm, op1: Ymm) {
3608        self.emit(
3609            VCVTQQ2PS256RR,
3610            op0.as_operand(),
3611            op1.as_operand(),
3612            &NOREG,
3613            &NOREG,
3614        );
3615    }
3616}
3617
3618impl<'a> Vcvtqq2psEmitter<Ymm, Zmm> for Assembler<'a> {
3619    fn vcvtqq2ps(&mut self, op0: Ymm, op1: Zmm) {
3620        self.emit(
3621            VCVTQQ2PS512RR,
3622            op0.as_operand(),
3623            op1.as_operand(),
3624            &NOREG,
3625            &NOREG,
3626        );
3627    }
3628}
3629
3630impl<'a> Vcvtqq2psEmitter<Ymm, Mem> for Assembler<'a> {
3631    fn vcvtqq2ps(&mut self, op0: Ymm, op1: Mem) {
3632        self.emit(
3633            VCVTQQ2PS512RM,
3634            op0.as_operand(),
3635            op1.as_operand(),
3636            &NOREG,
3637            &NOREG,
3638        );
3639    }
3640}
3641
3642/// `VCVTQQ2PS_ER`.
3643///
3644/// Supported operand variants:
3645///
3646/// ```text
3647/// +---+----------+
3648/// | # | Operands |
3649/// +---+----------+
3650/// | 1 | Ymm, Zmm |
3651/// +---+----------+
3652/// ```
3653pub trait Vcvtqq2psErEmitter<A, B> {
3654    fn vcvtqq2ps_er(&mut self, op0: A, op1: B);
3655}
3656
3657impl<'a> Vcvtqq2psErEmitter<Ymm, Zmm> for Assembler<'a> {
3658    fn vcvtqq2ps_er(&mut self, op0: Ymm, op1: Zmm) {
3659        self.emit(
3660            VCVTQQ2PS512RR_ER,
3661            op0.as_operand(),
3662            op1.as_operand(),
3663            &NOREG,
3664            &NOREG,
3665        );
3666    }
3667}
3668
3669/// `VCVTQQ2PS_MASK`.
3670///
3671/// Supported operand variants:
3672///
3673/// ```text
3674/// +---+----------+
3675/// | # | Operands |
3676/// +---+----------+
3677/// | 1 | Xmm, Mem |
3678/// | 2 | Xmm, Xmm |
3679/// | 3 | Xmm, Ymm |
3680/// | 4 | Ymm, Mem |
3681/// | 5 | Ymm, Zmm |
3682/// +---+----------+
3683/// ```
3684pub trait Vcvtqq2psMaskEmitter<A, B> {
3685    fn vcvtqq2ps_mask(&mut self, op0: A, op1: B);
3686}
3687
3688impl<'a> Vcvtqq2psMaskEmitter<Xmm, Xmm> for Assembler<'a> {
3689    fn vcvtqq2ps_mask(&mut self, op0: Xmm, op1: Xmm) {
3690        self.emit(
3691            VCVTQQ2PS128RR_MASK,
3692            op0.as_operand(),
3693            op1.as_operand(),
3694            &NOREG,
3695            &NOREG,
3696        );
3697    }
3698}
3699
3700impl<'a> Vcvtqq2psMaskEmitter<Xmm, Mem> for Assembler<'a> {
3701    fn vcvtqq2ps_mask(&mut self, op0: Xmm, op1: Mem) {
3702        self.emit(
3703            VCVTQQ2PS128RM_MASK,
3704            op0.as_operand(),
3705            op1.as_operand(),
3706            &NOREG,
3707            &NOREG,
3708        );
3709    }
3710}
3711
3712impl<'a> Vcvtqq2psMaskEmitter<Xmm, Ymm> for Assembler<'a> {
3713    fn vcvtqq2ps_mask(&mut self, op0: Xmm, op1: Ymm) {
3714        self.emit(
3715            VCVTQQ2PS256RR_MASK,
3716            op0.as_operand(),
3717            op1.as_operand(),
3718            &NOREG,
3719            &NOREG,
3720        );
3721    }
3722}
3723
3724impl<'a> Vcvtqq2psMaskEmitter<Ymm, Zmm> for Assembler<'a> {
3725    fn vcvtqq2ps_mask(&mut self, op0: Ymm, op1: Zmm) {
3726        self.emit(
3727            VCVTQQ2PS512RR_MASK,
3728            op0.as_operand(),
3729            op1.as_operand(),
3730            &NOREG,
3731            &NOREG,
3732        );
3733    }
3734}
3735
3736impl<'a> Vcvtqq2psMaskEmitter<Ymm, Mem> for Assembler<'a> {
3737    fn vcvtqq2ps_mask(&mut self, op0: Ymm, op1: Mem) {
3738        self.emit(
3739            VCVTQQ2PS512RM_MASK,
3740            op0.as_operand(),
3741            op1.as_operand(),
3742            &NOREG,
3743            &NOREG,
3744        );
3745    }
3746}
3747
3748/// `VCVTQQ2PS_MASK_ER`.
3749///
3750/// Supported operand variants:
3751///
3752/// ```text
3753/// +---+----------+
3754/// | # | Operands |
3755/// +---+----------+
3756/// | 1 | Ymm, Zmm |
3757/// +---+----------+
3758/// ```
3759pub trait Vcvtqq2psMaskErEmitter<A, B> {
3760    fn vcvtqq2ps_mask_er(&mut self, op0: A, op1: B);
3761}
3762
3763impl<'a> Vcvtqq2psMaskErEmitter<Ymm, Zmm> for Assembler<'a> {
3764    fn vcvtqq2ps_mask_er(&mut self, op0: Ymm, op1: Zmm) {
3765        self.emit(
3766            VCVTQQ2PS512RR_MASK_ER,
3767            op0.as_operand(),
3768            op1.as_operand(),
3769            &NOREG,
3770            &NOREG,
3771        );
3772    }
3773}
3774
3775/// `VCVTQQ2PS_MASKZ`.
3776///
3777/// Supported operand variants:
3778///
3779/// ```text
3780/// +---+----------+
3781/// | # | Operands |
3782/// +---+----------+
3783/// | 1 | Xmm, Mem |
3784/// | 2 | Xmm, Xmm |
3785/// | 3 | Xmm, Ymm |
3786/// | 4 | Ymm, Mem |
3787/// | 5 | Ymm, Zmm |
3788/// +---+----------+
3789/// ```
3790pub trait Vcvtqq2psMaskzEmitter<A, B> {
3791    fn vcvtqq2ps_maskz(&mut self, op0: A, op1: B);
3792}
3793
3794impl<'a> Vcvtqq2psMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
3795    fn vcvtqq2ps_maskz(&mut self, op0: Xmm, op1: Xmm) {
3796        self.emit(
3797            VCVTQQ2PS128RR_MASKZ,
3798            op0.as_operand(),
3799            op1.as_operand(),
3800            &NOREG,
3801            &NOREG,
3802        );
3803    }
3804}
3805
3806impl<'a> Vcvtqq2psMaskzEmitter<Xmm, Mem> for Assembler<'a> {
3807    fn vcvtqq2ps_maskz(&mut self, op0: Xmm, op1: Mem) {
3808        self.emit(
3809            VCVTQQ2PS128RM_MASKZ,
3810            op0.as_operand(),
3811            op1.as_operand(),
3812            &NOREG,
3813            &NOREG,
3814        );
3815    }
3816}
3817
3818impl<'a> Vcvtqq2psMaskzEmitter<Xmm, Ymm> for Assembler<'a> {
3819    fn vcvtqq2ps_maskz(&mut self, op0: Xmm, op1: Ymm) {
3820        self.emit(
3821            VCVTQQ2PS256RR_MASKZ,
3822            op0.as_operand(),
3823            op1.as_operand(),
3824            &NOREG,
3825            &NOREG,
3826        );
3827    }
3828}
3829
3830impl<'a> Vcvtqq2psMaskzEmitter<Ymm, Zmm> for Assembler<'a> {
3831    fn vcvtqq2ps_maskz(&mut self, op0: Ymm, op1: Zmm) {
3832        self.emit(
3833            VCVTQQ2PS512RR_MASKZ,
3834            op0.as_operand(),
3835            op1.as_operand(),
3836            &NOREG,
3837            &NOREG,
3838        );
3839    }
3840}
3841
3842impl<'a> Vcvtqq2psMaskzEmitter<Ymm, Mem> for Assembler<'a> {
3843    fn vcvtqq2ps_maskz(&mut self, op0: Ymm, op1: Mem) {
3844        self.emit(
3845            VCVTQQ2PS512RM_MASKZ,
3846            op0.as_operand(),
3847            op1.as_operand(),
3848            &NOREG,
3849            &NOREG,
3850        );
3851    }
3852}
3853
3854/// `VCVTQQ2PS_MASKZ_ER`.
3855///
3856/// Supported operand variants:
3857///
3858/// ```text
3859/// +---+----------+
3860/// | # | Operands |
3861/// +---+----------+
3862/// | 1 | Ymm, Zmm |
3863/// +---+----------+
3864/// ```
3865pub trait Vcvtqq2psMaskzErEmitter<A, B> {
3866    fn vcvtqq2ps_maskz_er(&mut self, op0: A, op1: B);
3867}
3868
3869impl<'a> Vcvtqq2psMaskzErEmitter<Ymm, Zmm> for Assembler<'a> {
3870    fn vcvtqq2ps_maskz_er(&mut self, op0: Ymm, op1: Zmm) {
3871        self.emit(
3872            VCVTQQ2PS512RR_MASKZ_ER,
3873            op0.as_operand(),
3874            op1.as_operand(),
3875            &NOREG,
3876            &NOREG,
3877        );
3878    }
3879}
3880
3881/// `VCVTTPD2QQ`.
3882///
3883/// Supported operand variants:
3884///
3885/// ```text
3886/// +---+----------+
3887/// | # | Operands |
3888/// +---+----------+
3889/// | 1 | Xmm, Mem |
3890/// | 2 | Xmm, Xmm |
3891/// | 3 | Ymm, Mem |
3892/// | 4 | Ymm, Ymm |
3893/// | 5 | Zmm, Mem |
3894/// | 6 | Zmm, Zmm |
3895/// +---+----------+
3896/// ```
3897pub trait Vcvttpd2qqEmitter<A, B> {
3898    fn vcvttpd2qq(&mut self, op0: A, op1: B);
3899}
3900
3901impl<'a> Vcvttpd2qqEmitter<Xmm, Xmm> for Assembler<'a> {
3902    fn vcvttpd2qq(&mut self, op0: Xmm, op1: Xmm) {
3903        self.emit(
3904            VCVTTPD2QQ128RR,
3905            op0.as_operand(),
3906            op1.as_operand(),
3907            &NOREG,
3908            &NOREG,
3909        );
3910    }
3911}
3912
3913impl<'a> Vcvttpd2qqEmitter<Xmm, Mem> for Assembler<'a> {
3914    fn vcvttpd2qq(&mut self, op0: Xmm, op1: Mem) {
3915        self.emit(
3916            VCVTTPD2QQ128RM,
3917            op0.as_operand(),
3918            op1.as_operand(),
3919            &NOREG,
3920            &NOREG,
3921        );
3922    }
3923}
3924
3925impl<'a> Vcvttpd2qqEmitter<Ymm, Ymm> for Assembler<'a> {
3926    fn vcvttpd2qq(&mut self, op0: Ymm, op1: Ymm) {
3927        self.emit(
3928            VCVTTPD2QQ256RR,
3929            op0.as_operand(),
3930            op1.as_operand(),
3931            &NOREG,
3932            &NOREG,
3933        );
3934    }
3935}
3936
3937impl<'a> Vcvttpd2qqEmitter<Ymm, Mem> for Assembler<'a> {
3938    fn vcvttpd2qq(&mut self, op0: Ymm, op1: Mem) {
3939        self.emit(
3940            VCVTTPD2QQ256RM,
3941            op0.as_operand(),
3942            op1.as_operand(),
3943            &NOREG,
3944            &NOREG,
3945        );
3946    }
3947}
3948
3949impl<'a> Vcvttpd2qqEmitter<Zmm, Zmm> for Assembler<'a> {
3950    fn vcvttpd2qq(&mut self, op0: Zmm, op1: Zmm) {
3951        self.emit(
3952            VCVTTPD2QQ512RR,
3953            op0.as_operand(),
3954            op1.as_operand(),
3955            &NOREG,
3956            &NOREG,
3957        );
3958    }
3959}
3960
3961impl<'a> Vcvttpd2qqEmitter<Zmm, Mem> for Assembler<'a> {
3962    fn vcvttpd2qq(&mut self, op0: Zmm, op1: Mem) {
3963        self.emit(
3964            VCVTTPD2QQ512RM,
3965            op0.as_operand(),
3966            op1.as_operand(),
3967            &NOREG,
3968            &NOREG,
3969        );
3970    }
3971}
3972
3973/// `VCVTTPD2QQ_MASK`.
3974///
3975/// Supported operand variants:
3976///
3977/// ```text
3978/// +---+----------+
3979/// | # | Operands |
3980/// +---+----------+
3981/// | 1 | Xmm, Mem |
3982/// | 2 | Xmm, Xmm |
3983/// | 3 | Ymm, Mem |
3984/// | 4 | Ymm, Ymm |
3985/// | 5 | Zmm, Mem |
3986/// | 6 | Zmm, Zmm |
3987/// +---+----------+
3988/// ```
3989pub trait Vcvttpd2qqMaskEmitter<A, B> {
3990    fn vcvttpd2qq_mask(&mut self, op0: A, op1: B);
3991}
3992
3993impl<'a> Vcvttpd2qqMaskEmitter<Xmm, Xmm> for Assembler<'a> {
3994    fn vcvttpd2qq_mask(&mut self, op0: Xmm, op1: Xmm) {
3995        self.emit(
3996            VCVTTPD2QQ128RR_MASK,
3997            op0.as_operand(),
3998            op1.as_operand(),
3999            &NOREG,
4000            &NOREG,
4001        );
4002    }
4003}
4004
4005impl<'a> Vcvttpd2qqMaskEmitter<Xmm, Mem> for Assembler<'a> {
4006    fn vcvttpd2qq_mask(&mut self, op0: Xmm, op1: Mem) {
4007        self.emit(
4008            VCVTTPD2QQ128RM_MASK,
4009            op0.as_operand(),
4010            op1.as_operand(),
4011            &NOREG,
4012            &NOREG,
4013        );
4014    }
4015}
4016
4017impl<'a> Vcvttpd2qqMaskEmitter<Ymm, Ymm> for Assembler<'a> {
4018    fn vcvttpd2qq_mask(&mut self, op0: Ymm, op1: Ymm) {
4019        self.emit(
4020            VCVTTPD2QQ256RR_MASK,
4021            op0.as_operand(),
4022            op1.as_operand(),
4023            &NOREG,
4024            &NOREG,
4025        );
4026    }
4027}
4028
4029impl<'a> Vcvttpd2qqMaskEmitter<Ymm, Mem> for Assembler<'a> {
4030    fn vcvttpd2qq_mask(&mut self, op0: Ymm, op1: Mem) {
4031        self.emit(
4032            VCVTTPD2QQ256RM_MASK,
4033            op0.as_operand(),
4034            op1.as_operand(),
4035            &NOREG,
4036            &NOREG,
4037        );
4038    }
4039}
4040
4041impl<'a> Vcvttpd2qqMaskEmitter<Zmm, Zmm> for Assembler<'a> {
4042    fn vcvttpd2qq_mask(&mut self, op0: Zmm, op1: Zmm) {
4043        self.emit(
4044            VCVTTPD2QQ512RR_MASK,
4045            op0.as_operand(),
4046            op1.as_operand(),
4047            &NOREG,
4048            &NOREG,
4049        );
4050    }
4051}
4052
4053impl<'a> Vcvttpd2qqMaskEmitter<Zmm, Mem> for Assembler<'a> {
4054    fn vcvttpd2qq_mask(&mut self, op0: Zmm, op1: Mem) {
4055        self.emit(
4056            VCVTTPD2QQ512RM_MASK,
4057            op0.as_operand(),
4058            op1.as_operand(),
4059            &NOREG,
4060            &NOREG,
4061        );
4062    }
4063}
4064
4065/// `VCVTTPD2QQ_MASK_SAE`.
4066///
4067/// Supported operand variants:
4068///
4069/// ```text
4070/// +---+----------+
4071/// | # | Operands |
4072/// +---+----------+
4073/// | 1 | Zmm, Zmm |
4074/// +---+----------+
4075/// ```
4076pub trait Vcvttpd2qqMaskSaeEmitter<A, B> {
4077    fn vcvttpd2qq_mask_sae(&mut self, op0: A, op1: B);
4078}
4079
4080impl<'a> Vcvttpd2qqMaskSaeEmitter<Zmm, Zmm> for Assembler<'a> {
4081    fn vcvttpd2qq_mask_sae(&mut self, op0: Zmm, op1: Zmm) {
4082        self.emit(
4083            VCVTTPD2QQ512RR_MASK_SAE,
4084            op0.as_operand(),
4085            op1.as_operand(),
4086            &NOREG,
4087            &NOREG,
4088        );
4089    }
4090}
4091
4092/// `VCVTTPD2QQ_MASKZ`.
4093///
4094/// Supported operand variants:
4095///
4096/// ```text
4097/// +---+----------+
4098/// | # | Operands |
4099/// +---+----------+
4100/// | 1 | Xmm, Mem |
4101/// | 2 | Xmm, Xmm |
4102/// | 3 | Ymm, Mem |
4103/// | 4 | Ymm, Ymm |
4104/// | 5 | Zmm, Mem |
4105/// | 6 | Zmm, Zmm |
4106/// +---+----------+
4107/// ```
4108pub trait Vcvttpd2qqMaskzEmitter<A, B> {
4109    fn vcvttpd2qq_maskz(&mut self, op0: A, op1: B);
4110}
4111
4112impl<'a> Vcvttpd2qqMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
4113    fn vcvttpd2qq_maskz(&mut self, op0: Xmm, op1: Xmm) {
4114        self.emit(
4115            VCVTTPD2QQ128RR_MASKZ,
4116            op0.as_operand(),
4117            op1.as_operand(),
4118            &NOREG,
4119            &NOREG,
4120        );
4121    }
4122}
4123
4124impl<'a> Vcvttpd2qqMaskzEmitter<Xmm, Mem> for Assembler<'a> {
4125    fn vcvttpd2qq_maskz(&mut self, op0: Xmm, op1: Mem) {
4126        self.emit(
4127            VCVTTPD2QQ128RM_MASKZ,
4128            op0.as_operand(),
4129            op1.as_operand(),
4130            &NOREG,
4131            &NOREG,
4132        );
4133    }
4134}
4135
4136impl<'a> Vcvttpd2qqMaskzEmitter<Ymm, Ymm> for Assembler<'a> {
4137    fn vcvttpd2qq_maskz(&mut self, op0: Ymm, op1: Ymm) {
4138        self.emit(
4139            VCVTTPD2QQ256RR_MASKZ,
4140            op0.as_operand(),
4141            op1.as_operand(),
4142            &NOREG,
4143            &NOREG,
4144        );
4145    }
4146}
4147
4148impl<'a> Vcvttpd2qqMaskzEmitter<Ymm, Mem> for Assembler<'a> {
4149    fn vcvttpd2qq_maskz(&mut self, op0: Ymm, op1: Mem) {
4150        self.emit(
4151            VCVTTPD2QQ256RM_MASKZ,
4152            op0.as_operand(),
4153            op1.as_operand(),
4154            &NOREG,
4155            &NOREG,
4156        );
4157    }
4158}
4159
4160impl<'a> Vcvttpd2qqMaskzEmitter<Zmm, Zmm> for Assembler<'a> {
4161    fn vcvttpd2qq_maskz(&mut self, op0: Zmm, op1: Zmm) {
4162        self.emit(
4163            VCVTTPD2QQ512RR_MASKZ,
4164            op0.as_operand(),
4165            op1.as_operand(),
4166            &NOREG,
4167            &NOREG,
4168        );
4169    }
4170}
4171
4172impl<'a> Vcvttpd2qqMaskzEmitter<Zmm, Mem> for Assembler<'a> {
4173    fn vcvttpd2qq_maskz(&mut self, op0: Zmm, op1: Mem) {
4174        self.emit(
4175            VCVTTPD2QQ512RM_MASKZ,
4176            op0.as_operand(),
4177            op1.as_operand(),
4178            &NOREG,
4179            &NOREG,
4180        );
4181    }
4182}
4183
4184/// `VCVTTPD2QQ_MASKZ_SAE`.
4185///
4186/// Supported operand variants:
4187///
4188/// ```text
4189/// +---+----------+
4190/// | # | Operands |
4191/// +---+----------+
4192/// | 1 | Zmm, Zmm |
4193/// +---+----------+
4194/// ```
4195pub trait Vcvttpd2qqMaskzSaeEmitter<A, B> {
4196    fn vcvttpd2qq_maskz_sae(&mut self, op0: A, op1: B);
4197}
4198
4199impl<'a> Vcvttpd2qqMaskzSaeEmitter<Zmm, Zmm> for Assembler<'a> {
4200    fn vcvttpd2qq_maskz_sae(&mut self, op0: Zmm, op1: Zmm) {
4201        self.emit(
4202            VCVTTPD2QQ512RR_MASKZ_SAE,
4203            op0.as_operand(),
4204            op1.as_operand(),
4205            &NOREG,
4206            &NOREG,
4207        );
4208    }
4209}
4210
4211/// `VCVTTPD2QQ_SAE`.
4212///
4213/// Supported operand variants:
4214///
4215/// ```text
4216/// +---+----------+
4217/// | # | Operands |
4218/// +---+----------+
4219/// | 1 | Zmm, Zmm |
4220/// +---+----------+
4221/// ```
4222pub trait Vcvttpd2qqSaeEmitter<A, B> {
4223    fn vcvttpd2qq_sae(&mut self, op0: A, op1: B);
4224}
4225
4226impl<'a> Vcvttpd2qqSaeEmitter<Zmm, Zmm> for Assembler<'a> {
4227    fn vcvttpd2qq_sae(&mut self, op0: Zmm, op1: Zmm) {
4228        self.emit(
4229            VCVTTPD2QQ512RR_SAE,
4230            op0.as_operand(),
4231            op1.as_operand(),
4232            &NOREG,
4233            &NOREG,
4234        );
4235    }
4236}
4237
4238/// `VCVTTPS2QQ`.
4239///
4240/// Supported operand variants:
4241///
4242/// ```text
4243/// +---+----------+
4244/// | # | Operands |
4245/// +---+----------+
4246/// | 1 | Xmm, Mem |
4247/// | 2 | Xmm, Xmm |
4248/// | 3 | Ymm, Mem |
4249/// | 4 | Ymm, Xmm |
4250/// | 5 | Zmm, Mem |
4251/// | 6 | Zmm, Ymm |
4252/// +---+----------+
4253/// ```
4254pub trait Vcvttps2qqEmitter<A, B> {
4255    fn vcvttps2qq(&mut self, op0: A, op1: B);
4256}
4257
4258impl<'a> Vcvttps2qqEmitter<Xmm, Xmm> for Assembler<'a> {
4259    fn vcvttps2qq(&mut self, op0: Xmm, op1: Xmm) {
4260        self.emit(
4261            VCVTTPS2QQ128RR,
4262            op0.as_operand(),
4263            op1.as_operand(),
4264            &NOREG,
4265            &NOREG,
4266        );
4267    }
4268}
4269
4270impl<'a> Vcvttps2qqEmitter<Xmm, Mem> for Assembler<'a> {
4271    fn vcvttps2qq(&mut self, op0: Xmm, op1: Mem) {
4272        self.emit(
4273            VCVTTPS2QQ128RM,
4274            op0.as_operand(),
4275            op1.as_operand(),
4276            &NOREG,
4277            &NOREG,
4278        );
4279    }
4280}
4281
4282impl<'a> Vcvttps2qqEmitter<Ymm, Xmm> for Assembler<'a> {
4283    fn vcvttps2qq(&mut self, op0: Ymm, op1: Xmm) {
4284        self.emit(
4285            VCVTTPS2QQ256RR,
4286            op0.as_operand(),
4287            op1.as_operand(),
4288            &NOREG,
4289            &NOREG,
4290        );
4291    }
4292}
4293
4294impl<'a> Vcvttps2qqEmitter<Ymm, Mem> for Assembler<'a> {
4295    fn vcvttps2qq(&mut self, op0: Ymm, op1: Mem) {
4296        self.emit(
4297            VCVTTPS2QQ256RM,
4298            op0.as_operand(),
4299            op1.as_operand(),
4300            &NOREG,
4301            &NOREG,
4302        );
4303    }
4304}
4305
4306impl<'a> Vcvttps2qqEmitter<Zmm, Ymm> for Assembler<'a> {
4307    fn vcvttps2qq(&mut self, op0: Zmm, op1: Ymm) {
4308        self.emit(
4309            VCVTTPS2QQ512RR,
4310            op0.as_operand(),
4311            op1.as_operand(),
4312            &NOREG,
4313            &NOREG,
4314        );
4315    }
4316}
4317
4318impl<'a> Vcvttps2qqEmitter<Zmm, Mem> for Assembler<'a> {
4319    fn vcvttps2qq(&mut self, op0: Zmm, op1: Mem) {
4320        self.emit(
4321            VCVTTPS2QQ512RM,
4322            op0.as_operand(),
4323            op1.as_operand(),
4324            &NOREG,
4325            &NOREG,
4326        );
4327    }
4328}
4329
4330/// `VCVTTPS2QQ_MASK`.
4331///
4332/// Supported operand variants:
4333///
4334/// ```text
4335/// +---+----------+
4336/// | # | Operands |
4337/// +---+----------+
4338/// | 1 | Xmm, Mem |
4339/// | 2 | Xmm, Xmm |
4340/// | 3 | Ymm, Mem |
4341/// | 4 | Ymm, Xmm |
4342/// | 5 | Zmm, Mem |
4343/// | 6 | Zmm, Ymm |
4344/// +---+----------+
4345/// ```
4346pub trait Vcvttps2qqMaskEmitter<A, B> {
4347    fn vcvttps2qq_mask(&mut self, op0: A, op1: B);
4348}
4349
4350impl<'a> Vcvttps2qqMaskEmitter<Xmm, Xmm> for Assembler<'a> {
4351    fn vcvttps2qq_mask(&mut self, op0: Xmm, op1: Xmm) {
4352        self.emit(
4353            VCVTTPS2QQ128RR_MASK,
4354            op0.as_operand(),
4355            op1.as_operand(),
4356            &NOREG,
4357            &NOREG,
4358        );
4359    }
4360}
4361
4362impl<'a> Vcvttps2qqMaskEmitter<Xmm, Mem> for Assembler<'a> {
4363    fn vcvttps2qq_mask(&mut self, op0: Xmm, op1: Mem) {
4364        self.emit(
4365            VCVTTPS2QQ128RM_MASK,
4366            op0.as_operand(),
4367            op1.as_operand(),
4368            &NOREG,
4369            &NOREG,
4370        );
4371    }
4372}
4373
4374impl<'a> Vcvttps2qqMaskEmitter<Ymm, Xmm> for Assembler<'a> {
4375    fn vcvttps2qq_mask(&mut self, op0: Ymm, op1: Xmm) {
4376        self.emit(
4377            VCVTTPS2QQ256RR_MASK,
4378            op0.as_operand(),
4379            op1.as_operand(),
4380            &NOREG,
4381            &NOREG,
4382        );
4383    }
4384}
4385
4386impl<'a> Vcvttps2qqMaskEmitter<Ymm, Mem> for Assembler<'a> {
4387    fn vcvttps2qq_mask(&mut self, op0: Ymm, op1: Mem) {
4388        self.emit(
4389            VCVTTPS2QQ256RM_MASK,
4390            op0.as_operand(),
4391            op1.as_operand(),
4392            &NOREG,
4393            &NOREG,
4394        );
4395    }
4396}
4397
4398impl<'a> Vcvttps2qqMaskEmitter<Zmm, Ymm> for Assembler<'a> {
4399    fn vcvttps2qq_mask(&mut self, op0: Zmm, op1: Ymm) {
4400        self.emit(
4401            VCVTTPS2QQ512RR_MASK,
4402            op0.as_operand(),
4403            op1.as_operand(),
4404            &NOREG,
4405            &NOREG,
4406        );
4407    }
4408}
4409
4410impl<'a> Vcvttps2qqMaskEmitter<Zmm, Mem> for Assembler<'a> {
4411    fn vcvttps2qq_mask(&mut self, op0: Zmm, op1: Mem) {
4412        self.emit(
4413            VCVTTPS2QQ512RM_MASK,
4414            op0.as_operand(),
4415            op1.as_operand(),
4416            &NOREG,
4417            &NOREG,
4418        );
4419    }
4420}
4421
4422/// `VCVTTPS2QQ_MASK_SAE`.
4423///
4424/// Supported operand variants:
4425///
4426/// ```text
4427/// +---+----------+
4428/// | # | Operands |
4429/// +---+----------+
4430/// | 1 | Zmm, Ymm |
4431/// +---+----------+
4432/// ```
4433pub trait Vcvttps2qqMaskSaeEmitter<A, B> {
4434    fn vcvttps2qq_mask_sae(&mut self, op0: A, op1: B);
4435}
4436
4437impl<'a> Vcvttps2qqMaskSaeEmitter<Zmm, Ymm> for Assembler<'a> {
4438    fn vcvttps2qq_mask_sae(&mut self, op0: Zmm, op1: Ymm) {
4439        self.emit(
4440            VCVTTPS2QQ512RR_MASK_SAE,
4441            op0.as_operand(),
4442            op1.as_operand(),
4443            &NOREG,
4444            &NOREG,
4445        );
4446    }
4447}
4448
4449/// `VCVTTPS2QQ_MASKZ`.
4450///
4451/// Supported operand variants:
4452///
4453/// ```text
4454/// +---+----------+
4455/// | # | Operands |
4456/// +---+----------+
4457/// | 1 | Xmm, Mem |
4458/// | 2 | Xmm, Xmm |
4459/// | 3 | Ymm, Mem |
4460/// | 4 | Ymm, Xmm |
4461/// | 5 | Zmm, Mem |
4462/// | 6 | Zmm, Ymm |
4463/// +---+----------+
4464/// ```
4465pub trait Vcvttps2qqMaskzEmitter<A, B> {
4466    fn vcvttps2qq_maskz(&mut self, op0: A, op1: B);
4467}
4468
4469impl<'a> Vcvttps2qqMaskzEmitter<Xmm, Xmm> for Assembler<'a> {
4470    fn vcvttps2qq_maskz(&mut self, op0: Xmm, op1: Xmm) {
4471        self.emit(
4472            VCVTTPS2QQ128RR_MASKZ,
4473            op0.as_operand(),
4474            op1.as_operand(),
4475            &NOREG,
4476            &NOREG,
4477        );
4478    }
4479}
4480
4481impl<'a> Vcvttps2qqMaskzEmitter<Xmm, Mem> for Assembler<'a> {
4482    fn vcvttps2qq_maskz(&mut self, op0: Xmm, op1: Mem) {
4483        self.emit(
4484            VCVTTPS2QQ128RM_MASKZ,
4485            op0.as_operand(),
4486            op1.as_operand(),
4487            &NOREG,
4488            &NOREG,
4489        );
4490    }
4491}
4492
4493impl<'a> Vcvttps2qqMaskzEmitter<Ymm, Xmm> for Assembler<'a> {
4494    fn vcvttps2qq_maskz(&mut self, op0: Ymm, op1: Xmm) {
4495        self.emit(
4496            VCVTTPS2QQ256RR_MASKZ,
4497            op0.as_operand(),
4498            op1.as_operand(),
4499            &NOREG,
4500            &NOREG,
4501        );
4502    }
4503}
4504
4505impl<'a> Vcvttps2qqMaskzEmitter<Ymm, Mem> for Assembler<'a> {
4506    fn vcvttps2qq_maskz(&mut self, op0: Ymm, op1: Mem) {
4507        self.emit(
4508            VCVTTPS2QQ256RM_MASKZ,
4509            op0.as_operand(),
4510            op1.as_operand(),
4511            &NOREG,
4512            &NOREG,
4513        );
4514    }
4515}
4516
4517impl<'a> Vcvttps2qqMaskzEmitter<Zmm, Ymm> for Assembler<'a> {
4518    fn vcvttps2qq_maskz(&mut self, op0: Zmm, op1: Ymm) {
4519        self.emit(
4520            VCVTTPS2QQ512RR_MASKZ,
4521            op0.as_operand(),
4522            op1.as_operand(),
4523            &NOREG,
4524            &NOREG,
4525        );
4526    }
4527}
4528
4529impl<'a> Vcvttps2qqMaskzEmitter<Zmm, Mem> for Assembler<'a> {
4530    fn vcvttps2qq_maskz(&mut self, op0: Zmm, op1: Mem) {
4531        self.emit(
4532            VCVTTPS2QQ512RM_MASKZ,
4533            op0.as_operand(),
4534            op1.as_operand(),
4535            &NOREG,
4536            &NOREG,
4537        );
4538    }
4539}
4540
4541/// `VCVTTPS2QQ_MASKZ_SAE`.
4542///
4543/// Supported operand variants:
4544///
4545/// ```text
4546/// +---+----------+
4547/// | # | Operands |
4548/// +---+----------+
4549/// | 1 | Zmm, Ymm |
4550/// +---+----------+
4551/// ```
4552pub trait Vcvttps2qqMaskzSaeEmitter<A, B> {
4553    fn vcvttps2qq_maskz_sae(&mut self, op0: A, op1: B);
4554}
4555
4556impl<'a> Vcvttps2qqMaskzSaeEmitter<Zmm, Ymm> for Assembler<'a> {
4557    fn vcvttps2qq_maskz_sae(&mut self, op0: Zmm, op1: Ymm) {
4558        self.emit(
4559            VCVTTPS2QQ512RR_MASKZ_SAE,
4560            op0.as_operand(),
4561            op1.as_operand(),
4562            &NOREG,
4563            &NOREG,
4564        );
4565    }
4566}
4567
4568/// `VCVTTPS2QQ_SAE`.
4569///
4570/// Supported operand variants:
4571///
4572/// ```text
4573/// +---+----------+
4574/// | # | Operands |
4575/// +---+----------+
4576/// | 1 | Zmm, Ymm |
4577/// +---+----------+
4578/// ```
4579pub trait Vcvttps2qqSaeEmitter<A, B> {
4580    fn vcvttps2qq_sae(&mut self, op0: A, op1: B);
4581}
4582
4583impl<'a> Vcvttps2qqSaeEmitter<Zmm, Ymm> for Assembler<'a> {
4584    fn vcvttps2qq_sae(&mut self, op0: Zmm, op1: Ymm) {
4585        self.emit(
4586            VCVTTPS2QQ512RR_SAE,
4587            op0.as_operand(),
4588            op1.as_operand(),
4589            &NOREG,
4590            &NOREG,
4591        );
4592    }
4593}
4594
4595/// `VFPCLASSPD`.
4596///
4597/// Supported operand variants:
4598///
4599/// ```text
4600/// +---+----------------+
4601/// | # | Operands       |
4602/// +---+----------------+
4603/// | 1 | KReg, Mem, Imm |
4604/// | 2 | KReg, Xmm, Imm |
4605/// | 3 | KReg, Ymm, Imm |
4606/// | 4 | KReg, Zmm, Imm |
4607/// +---+----------------+
4608/// ```
4609pub trait VfpclasspdEmitter<A, B, C> {
4610    fn vfpclasspd(&mut self, op0: A, op1: B, op2: C);
4611}
4612
4613impl<'a> VfpclasspdEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4614    fn vfpclasspd(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4615        self.emit(
4616            VFPCLASSPD128KRI,
4617            op0.as_operand(),
4618            op1.as_operand(),
4619            op2.as_operand(),
4620            &NOREG,
4621        );
4622    }
4623}
4624
4625impl<'a> VfpclasspdEmitter<KReg, Mem, Imm> for Assembler<'a> {
4626    fn vfpclasspd(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4627        self.emit(
4628            VFPCLASSPD128KMI,
4629            op0.as_operand(),
4630            op1.as_operand(),
4631            op2.as_operand(),
4632            &NOREG,
4633        );
4634    }
4635}
4636
4637impl<'a> VfpclasspdEmitter<KReg, Ymm, Imm> for Assembler<'a> {
4638    fn vfpclasspd(&mut self, op0: KReg, op1: Ymm, op2: Imm) {
4639        self.emit(
4640            VFPCLASSPD256KRI,
4641            op0.as_operand(),
4642            op1.as_operand(),
4643            op2.as_operand(),
4644            &NOREG,
4645        );
4646    }
4647}
4648
4649impl<'a> VfpclasspdEmitter<KReg, Zmm, Imm> for Assembler<'a> {
4650    fn vfpclasspd(&mut self, op0: KReg, op1: Zmm, op2: Imm) {
4651        self.emit(
4652            VFPCLASSPD512KRI,
4653            op0.as_operand(),
4654            op1.as_operand(),
4655            op2.as_operand(),
4656            &NOREG,
4657        );
4658    }
4659}
4660
4661/// `VFPCLASSPD_MASK`.
4662///
4663/// Supported operand variants:
4664///
4665/// ```text
4666/// +---+----------------+
4667/// | # | Operands       |
4668/// +---+----------------+
4669/// | 1 | KReg, Mem, Imm |
4670/// | 2 | KReg, Xmm, Imm |
4671/// | 3 | KReg, Ymm, Imm |
4672/// | 4 | KReg, Zmm, Imm |
4673/// +---+----------------+
4674/// ```
4675pub trait VfpclasspdMaskEmitter<A, B, C> {
4676    fn vfpclasspd_mask(&mut self, op0: A, op1: B, op2: C);
4677}
4678
4679impl<'a> VfpclasspdMaskEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4680    fn vfpclasspd_mask(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4681        self.emit(
4682            VFPCLASSPD128KRI_MASK,
4683            op0.as_operand(),
4684            op1.as_operand(),
4685            op2.as_operand(),
4686            &NOREG,
4687        );
4688    }
4689}
4690
4691impl<'a> VfpclasspdMaskEmitter<KReg, Mem, Imm> for Assembler<'a> {
4692    fn vfpclasspd_mask(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4693        self.emit(
4694            VFPCLASSPD128KMI_MASK,
4695            op0.as_operand(),
4696            op1.as_operand(),
4697            op2.as_operand(),
4698            &NOREG,
4699        );
4700    }
4701}
4702
4703impl<'a> VfpclasspdMaskEmitter<KReg, Ymm, Imm> for Assembler<'a> {
4704    fn vfpclasspd_mask(&mut self, op0: KReg, op1: Ymm, op2: Imm) {
4705        self.emit(
4706            VFPCLASSPD256KRI_MASK,
4707            op0.as_operand(),
4708            op1.as_operand(),
4709            op2.as_operand(),
4710            &NOREG,
4711        );
4712    }
4713}
4714
4715impl<'a> VfpclasspdMaskEmitter<KReg, Zmm, Imm> for Assembler<'a> {
4716    fn vfpclasspd_mask(&mut self, op0: KReg, op1: Zmm, op2: Imm) {
4717        self.emit(
4718            VFPCLASSPD512KRI_MASK,
4719            op0.as_operand(),
4720            op1.as_operand(),
4721            op2.as_operand(),
4722            &NOREG,
4723        );
4724    }
4725}
4726
4727/// `VFPCLASSPS`.
4728///
4729/// Supported operand variants:
4730///
4731/// ```text
4732/// +---+----------------+
4733/// | # | Operands       |
4734/// +---+----------------+
4735/// | 1 | KReg, Mem, Imm |
4736/// | 2 | KReg, Xmm, Imm |
4737/// | 3 | KReg, Ymm, Imm |
4738/// | 4 | KReg, Zmm, Imm |
4739/// +---+----------------+
4740/// ```
4741pub trait VfpclasspsEmitter<A, B, C> {
4742    fn vfpclassps(&mut self, op0: A, op1: B, op2: C);
4743}
4744
4745impl<'a> VfpclasspsEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4746    fn vfpclassps(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4747        self.emit(
4748            VFPCLASSPS128KRI,
4749            op0.as_operand(),
4750            op1.as_operand(),
4751            op2.as_operand(),
4752            &NOREG,
4753        );
4754    }
4755}
4756
4757impl<'a> VfpclasspsEmitter<KReg, Mem, Imm> for Assembler<'a> {
4758    fn vfpclassps(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4759        self.emit(
4760            VFPCLASSPS128KMI,
4761            op0.as_operand(),
4762            op1.as_operand(),
4763            op2.as_operand(),
4764            &NOREG,
4765        );
4766    }
4767}
4768
4769impl<'a> VfpclasspsEmitter<KReg, Ymm, Imm> for Assembler<'a> {
4770    fn vfpclassps(&mut self, op0: KReg, op1: Ymm, op2: Imm) {
4771        self.emit(
4772            VFPCLASSPS256KRI,
4773            op0.as_operand(),
4774            op1.as_operand(),
4775            op2.as_operand(),
4776            &NOREG,
4777        );
4778    }
4779}
4780
4781impl<'a> VfpclasspsEmitter<KReg, Zmm, Imm> for Assembler<'a> {
4782    fn vfpclassps(&mut self, op0: KReg, op1: Zmm, op2: Imm) {
4783        self.emit(
4784            VFPCLASSPS512KRI,
4785            op0.as_operand(),
4786            op1.as_operand(),
4787            op2.as_operand(),
4788            &NOREG,
4789        );
4790    }
4791}
4792
4793/// `VFPCLASSPS_MASK`.
4794///
4795/// Supported operand variants:
4796///
4797/// ```text
4798/// +---+----------------+
4799/// | # | Operands       |
4800/// +---+----------------+
4801/// | 1 | KReg, Mem, Imm |
4802/// | 2 | KReg, Xmm, Imm |
4803/// | 3 | KReg, Ymm, Imm |
4804/// | 4 | KReg, Zmm, Imm |
4805/// +---+----------------+
4806/// ```
4807pub trait VfpclasspsMaskEmitter<A, B, C> {
4808    fn vfpclassps_mask(&mut self, op0: A, op1: B, op2: C);
4809}
4810
4811impl<'a> VfpclasspsMaskEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4812    fn vfpclassps_mask(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4813        self.emit(
4814            VFPCLASSPS128KRI_MASK,
4815            op0.as_operand(),
4816            op1.as_operand(),
4817            op2.as_operand(),
4818            &NOREG,
4819        );
4820    }
4821}
4822
4823impl<'a> VfpclasspsMaskEmitter<KReg, Mem, Imm> for Assembler<'a> {
4824    fn vfpclassps_mask(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4825        self.emit(
4826            VFPCLASSPS128KMI_MASK,
4827            op0.as_operand(),
4828            op1.as_operand(),
4829            op2.as_operand(),
4830            &NOREG,
4831        );
4832    }
4833}
4834
4835impl<'a> VfpclasspsMaskEmitter<KReg, Ymm, Imm> for Assembler<'a> {
4836    fn vfpclassps_mask(&mut self, op0: KReg, op1: Ymm, op2: Imm) {
4837        self.emit(
4838            VFPCLASSPS256KRI_MASK,
4839            op0.as_operand(),
4840            op1.as_operand(),
4841            op2.as_operand(),
4842            &NOREG,
4843        );
4844    }
4845}
4846
4847impl<'a> VfpclasspsMaskEmitter<KReg, Zmm, Imm> for Assembler<'a> {
4848    fn vfpclassps_mask(&mut self, op0: KReg, op1: Zmm, op2: Imm) {
4849        self.emit(
4850            VFPCLASSPS512KRI_MASK,
4851            op0.as_operand(),
4852            op1.as_operand(),
4853            op2.as_operand(),
4854            &NOREG,
4855        );
4856    }
4857}
4858
4859/// `VFPCLASSSD`.
4860///
4861/// Supported operand variants:
4862///
4863/// ```text
4864/// +---+----------------+
4865/// | # | Operands       |
4866/// +---+----------------+
4867/// | 1 | KReg, Mem, Imm |
4868/// | 2 | KReg, Xmm, Imm |
4869/// +---+----------------+
4870/// ```
4871pub trait VfpclasssdEmitter<A, B, C> {
4872    fn vfpclasssd(&mut self, op0: A, op1: B, op2: C);
4873}
4874
4875impl<'a> VfpclasssdEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4876    fn vfpclasssd(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4877        self.emit(
4878            VFPCLASSSDKRI,
4879            op0.as_operand(),
4880            op1.as_operand(),
4881            op2.as_operand(),
4882            &NOREG,
4883        );
4884    }
4885}
4886
4887impl<'a> VfpclasssdEmitter<KReg, Mem, Imm> for Assembler<'a> {
4888    fn vfpclasssd(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4889        self.emit(
4890            VFPCLASSSDKMI,
4891            op0.as_operand(),
4892            op1.as_operand(),
4893            op2.as_operand(),
4894            &NOREG,
4895        );
4896    }
4897}
4898
4899/// `VFPCLASSSD_MASK`.
4900///
4901/// Supported operand variants:
4902///
4903/// ```text
4904/// +---+----------------+
4905/// | # | Operands       |
4906/// +---+----------------+
4907/// | 1 | KReg, Mem, Imm |
4908/// | 2 | KReg, Xmm, Imm |
4909/// +---+----------------+
4910/// ```
4911pub trait VfpclasssdMaskEmitter<A, B, C> {
4912    fn vfpclasssd_mask(&mut self, op0: A, op1: B, op2: C);
4913}
4914
4915impl<'a> VfpclasssdMaskEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4916    fn vfpclasssd_mask(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4917        self.emit(
4918            VFPCLASSSDKRI_MASK,
4919            op0.as_operand(),
4920            op1.as_operand(),
4921            op2.as_operand(),
4922            &NOREG,
4923        );
4924    }
4925}
4926
4927impl<'a> VfpclasssdMaskEmitter<KReg, Mem, Imm> for Assembler<'a> {
4928    fn vfpclasssd_mask(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4929        self.emit(
4930            VFPCLASSSDKMI_MASK,
4931            op0.as_operand(),
4932            op1.as_operand(),
4933            op2.as_operand(),
4934            &NOREG,
4935        );
4936    }
4937}
4938
4939/// `VFPCLASSSS`.
4940///
4941/// Supported operand variants:
4942///
4943/// ```text
4944/// +---+----------------+
4945/// | # | Operands       |
4946/// +---+----------------+
4947/// | 1 | KReg, Mem, Imm |
4948/// | 2 | KReg, Xmm, Imm |
4949/// +---+----------------+
4950/// ```
4951pub trait VfpclassssEmitter<A, B, C> {
4952    fn vfpclassss(&mut self, op0: A, op1: B, op2: C);
4953}
4954
4955impl<'a> VfpclassssEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4956    fn vfpclassss(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4957        self.emit(
4958            VFPCLASSSSKRI,
4959            op0.as_operand(),
4960            op1.as_operand(),
4961            op2.as_operand(),
4962            &NOREG,
4963        );
4964    }
4965}
4966
4967impl<'a> VfpclassssEmitter<KReg, Mem, Imm> for Assembler<'a> {
4968    fn vfpclassss(&mut self, op0: KReg, op1: Mem, op2: Imm) {
4969        self.emit(
4970            VFPCLASSSSKMI,
4971            op0.as_operand(),
4972            op1.as_operand(),
4973            op2.as_operand(),
4974            &NOREG,
4975        );
4976    }
4977}
4978
4979/// `VFPCLASSSS_MASK`.
4980///
4981/// Supported operand variants:
4982///
4983/// ```text
4984/// +---+----------------+
4985/// | # | Operands       |
4986/// +---+----------------+
4987/// | 1 | KReg, Mem, Imm |
4988/// | 2 | KReg, Xmm, Imm |
4989/// +---+----------------+
4990/// ```
4991pub trait VfpclassssMaskEmitter<A, B, C> {
4992    fn vfpclassss_mask(&mut self, op0: A, op1: B, op2: C);
4993}
4994
4995impl<'a> VfpclassssMaskEmitter<KReg, Xmm, Imm> for Assembler<'a> {
4996    fn vfpclassss_mask(&mut self, op0: KReg, op1: Xmm, op2: Imm) {
4997        self.emit(
4998            VFPCLASSSSKRI_MASK,
4999            op0.as_operand(),
5000            op1.as_operand(),
5001            op2.as_operand(),
5002            &NOREG,
5003        );
5004    }
5005}
5006
5007impl<'a> VfpclassssMaskEmitter<KReg, Mem, Imm> for Assembler<'a> {
5008    fn vfpclassss_mask(&mut self, op0: KReg, op1: Mem, op2: Imm) {
5009        self.emit(
5010            VFPCLASSSSKMI_MASK,
5011            op0.as_operand(),
5012            op1.as_operand(),
5013            op2.as_operand(),
5014            &NOREG,
5015        );
5016    }
5017}
5018
5019/// `VINSERTF32X8`.
5020///
5021/// Supported operand variants:
5022///
5023/// ```text
5024/// +---+--------------------+
5025/// | # | Operands           |
5026/// +---+--------------------+
5027/// | 1 | Zmm, Zmm, Mem, Imm |
5028/// | 2 | Zmm, Zmm, Ymm, Imm |
5029/// +---+--------------------+
5030/// ```
5031pub trait Vinsertf32x8Emitter<A, B, C, D> {
5032    fn vinsertf32x8(&mut self, op0: A, op1: B, op2: C, op3: D);
5033}
5034
5035impl<'a> Vinsertf32x8Emitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5036    fn vinsertf32x8(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5037        self.emit(
5038            VINSERTF32X8_512RRRI,
5039            op0.as_operand(),
5040            op1.as_operand(),
5041            op2.as_operand(),
5042            op3.as_operand(),
5043        );
5044    }
5045}
5046
5047impl<'a> Vinsertf32x8Emitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5048    fn vinsertf32x8(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5049        self.emit(
5050            VINSERTF32X8_512RRMI,
5051            op0.as_operand(),
5052            op1.as_operand(),
5053            op2.as_operand(),
5054            op3.as_operand(),
5055        );
5056    }
5057}
5058
5059/// `VINSERTF32X8_MASK`.
5060///
5061/// Supported operand variants:
5062///
5063/// ```text
5064/// +---+--------------------+
5065/// | # | Operands           |
5066/// +---+--------------------+
5067/// | 1 | Zmm, Zmm, Mem, Imm |
5068/// | 2 | Zmm, Zmm, Ymm, Imm |
5069/// +---+--------------------+
5070/// ```
5071pub trait Vinsertf32x8MaskEmitter<A, B, C, D> {
5072    fn vinsertf32x8_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
5073}
5074
5075impl<'a> Vinsertf32x8MaskEmitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5076    fn vinsertf32x8_mask(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5077        self.emit(
5078            VINSERTF32X8_512RRRI_MASK,
5079            op0.as_operand(),
5080            op1.as_operand(),
5081            op2.as_operand(),
5082            op3.as_operand(),
5083        );
5084    }
5085}
5086
5087impl<'a> Vinsertf32x8MaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5088    fn vinsertf32x8_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5089        self.emit(
5090            VINSERTF32X8_512RRMI_MASK,
5091            op0.as_operand(),
5092            op1.as_operand(),
5093            op2.as_operand(),
5094            op3.as_operand(),
5095        );
5096    }
5097}
5098
5099/// `VINSERTF32X8_MASKZ`.
5100///
5101/// Supported operand variants:
5102///
5103/// ```text
5104/// +---+--------------------+
5105/// | # | Operands           |
5106/// +---+--------------------+
5107/// | 1 | Zmm, Zmm, Mem, Imm |
5108/// | 2 | Zmm, Zmm, Ymm, Imm |
5109/// +---+--------------------+
5110/// ```
5111pub trait Vinsertf32x8MaskzEmitter<A, B, C, D> {
5112    fn vinsertf32x8_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
5113}
5114
5115impl<'a> Vinsertf32x8MaskzEmitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5116    fn vinsertf32x8_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5117        self.emit(
5118            VINSERTF32X8_512RRRI_MASKZ,
5119            op0.as_operand(),
5120            op1.as_operand(),
5121            op2.as_operand(),
5122            op3.as_operand(),
5123        );
5124    }
5125}
5126
5127impl<'a> Vinsertf32x8MaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5128    fn vinsertf32x8_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5129        self.emit(
5130            VINSERTF32X8_512RRMI_MASKZ,
5131            op0.as_operand(),
5132            op1.as_operand(),
5133            op2.as_operand(),
5134            op3.as_operand(),
5135        );
5136    }
5137}
5138
5139/// `VINSERTF64X2`.
5140///
5141/// Supported operand variants:
5142///
5143/// ```text
5144/// +---+--------------------+
5145/// | # | Operands           |
5146/// +---+--------------------+
5147/// | 1 | Ymm, Ymm, Mem, Imm |
5148/// | 2 | Ymm, Ymm, Xmm, Imm |
5149/// | 3 | Zmm, Zmm, Mem, Imm |
5150/// | 4 | Zmm, Zmm, Xmm, Imm |
5151/// +---+--------------------+
5152/// ```
5153pub trait Vinsertf64x2Emitter<A, B, C, D> {
5154    fn vinsertf64x2(&mut self, op0: A, op1: B, op2: C, op3: D);
5155}
5156
5157impl<'a> Vinsertf64x2Emitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5158    fn vinsertf64x2(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5159        self.emit(
5160            VINSERTF64X2_256RRRI,
5161            op0.as_operand(),
5162            op1.as_operand(),
5163            op2.as_operand(),
5164            op3.as_operand(),
5165        );
5166    }
5167}
5168
5169impl<'a> Vinsertf64x2Emitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5170    fn vinsertf64x2(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5171        self.emit(
5172            VINSERTF64X2_256RRMI,
5173            op0.as_operand(),
5174            op1.as_operand(),
5175            op2.as_operand(),
5176            op3.as_operand(),
5177        );
5178    }
5179}
5180
5181impl<'a> Vinsertf64x2Emitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5182    fn vinsertf64x2(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5183        self.emit(
5184            VINSERTF64X2_512RRRI,
5185            op0.as_operand(),
5186            op1.as_operand(),
5187            op2.as_operand(),
5188            op3.as_operand(),
5189        );
5190    }
5191}
5192
5193impl<'a> Vinsertf64x2Emitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5194    fn vinsertf64x2(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5195        self.emit(
5196            VINSERTF64X2_512RRMI,
5197            op0.as_operand(),
5198            op1.as_operand(),
5199            op2.as_operand(),
5200            op3.as_operand(),
5201        );
5202    }
5203}
5204
5205/// `VINSERTF64X2_MASK`.
5206///
5207/// Supported operand variants:
5208///
5209/// ```text
5210/// +---+--------------------+
5211/// | # | Operands           |
5212/// +---+--------------------+
5213/// | 1 | Ymm, Ymm, Mem, Imm |
5214/// | 2 | Ymm, Ymm, Xmm, Imm |
5215/// | 3 | Zmm, Zmm, Mem, Imm |
5216/// | 4 | Zmm, Zmm, Xmm, Imm |
5217/// +---+--------------------+
5218/// ```
5219pub trait Vinsertf64x2MaskEmitter<A, B, C, D> {
5220    fn vinsertf64x2_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
5221}
5222
5223impl<'a> Vinsertf64x2MaskEmitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5224    fn vinsertf64x2_mask(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5225        self.emit(
5226            VINSERTF64X2_256RRRI_MASK,
5227            op0.as_operand(),
5228            op1.as_operand(),
5229            op2.as_operand(),
5230            op3.as_operand(),
5231        );
5232    }
5233}
5234
5235impl<'a> Vinsertf64x2MaskEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5236    fn vinsertf64x2_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5237        self.emit(
5238            VINSERTF64X2_256RRMI_MASK,
5239            op0.as_operand(),
5240            op1.as_operand(),
5241            op2.as_operand(),
5242            op3.as_operand(),
5243        );
5244    }
5245}
5246
5247impl<'a> Vinsertf64x2MaskEmitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5248    fn vinsertf64x2_mask(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5249        self.emit(
5250            VINSERTF64X2_512RRRI_MASK,
5251            op0.as_operand(),
5252            op1.as_operand(),
5253            op2.as_operand(),
5254            op3.as_operand(),
5255        );
5256    }
5257}
5258
5259impl<'a> Vinsertf64x2MaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5260    fn vinsertf64x2_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5261        self.emit(
5262            VINSERTF64X2_512RRMI_MASK,
5263            op0.as_operand(),
5264            op1.as_operand(),
5265            op2.as_operand(),
5266            op3.as_operand(),
5267        );
5268    }
5269}
5270
5271/// `VINSERTF64X2_MASKZ`.
5272///
5273/// Supported operand variants:
5274///
5275/// ```text
5276/// +---+--------------------+
5277/// | # | Operands           |
5278/// +---+--------------------+
5279/// | 1 | Ymm, Ymm, Mem, Imm |
5280/// | 2 | Ymm, Ymm, Xmm, Imm |
5281/// | 3 | Zmm, Zmm, Mem, Imm |
5282/// | 4 | Zmm, Zmm, Xmm, Imm |
5283/// +---+--------------------+
5284/// ```
5285pub trait Vinsertf64x2MaskzEmitter<A, B, C, D> {
5286    fn vinsertf64x2_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
5287}
5288
5289impl<'a> Vinsertf64x2MaskzEmitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5290    fn vinsertf64x2_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5291        self.emit(
5292            VINSERTF64X2_256RRRI_MASKZ,
5293            op0.as_operand(),
5294            op1.as_operand(),
5295            op2.as_operand(),
5296            op3.as_operand(),
5297        );
5298    }
5299}
5300
5301impl<'a> Vinsertf64x2MaskzEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5302    fn vinsertf64x2_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5303        self.emit(
5304            VINSERTF64X2_256RRMI_MASKZ,
5305            op0.as_operand(),
5306            op1.as_operand(),
5307            op2.as_operand(),
5308            op3.as_operand(),
5309        );
5310    }
5311}
5312
5313impl<'a> Vinsertf64x2MaskzEmitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5314    fn vinsertf64x2_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5315        self.emit(
5316            VINSERTF64X2_512RRRI_MASKZ,
5317            op0.as_operand(),
5318            op1.as_operand(),
5319            op2.as_operand(),
5320            op3.as_operand(),
5321        );
5322    }
5323}
5324
5325impl<'a> Vinsertf64x2MaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5326    fn vinsertf64x2_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5327        self.emit(
5328            VINSERTF64X2_512RRMI_MASKZ,
5329            op0.as_operand(),
5330            op1.as_operand(),
5331            op2.as_operand(),
5332            op3.as_operand(),
5333        );
5334    }
5335}
5336
5337/// `VINSERTI32X8`.
5338///
5339/// Supported operand variants:
5340///
5341/// ```text
5342/// +---+--------------------+
5343/// | # | Operands           |
5344/// +---+--------------------+
5345/// | 1 | Zmm, Zmm, Mem, Imm |
5346/// | 2 | Zmm, Zmm, Ymm, Imm |
5347/// +---+--------------------+
5348/// ```
5349pub trait Vinserti32x8Emitter<A, B, C, D> {
5350    fn vinserti32x8(&mut self, op0: A, op1: B, op2: C, op3: D);
5351}
5352
5353impl<'a> Vinserti32x8Emitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5354    fn vinserti32x8(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5355        self.emit(
5356            VINSERTI32X8_512RRRI,
5357            op0.as_operand(),
5358            op1.as_operand(),
5359            op2.as_operand(),
5360            op3.as_operand(),
5361        );
5362    }
5363}
5364
5365impl<'a> Vinserti32x8Emitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5366    fn vinserti32x8(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5367        self.emit(
5368            VINSERTI32X8_512RRMI,
5369            op0.as_operand(),
5370            op1.as_operand(),
5371            op2.as_operand(),
5372            op3.as_operand(),
5373        );
5374    }
5375}
5376
5377/// `VINSERTI32X8_MASK`.
5378///
5379/// Supported operand variants:
5380///
5381/// ```text
5382/// +---+--------------------+
5383/// | # | Operands           |
5384/// +---+--------------------+
5385/// | 1 | Zmm, Zmm, Mem, Imm |
5386/// | 2 | Zmm, Zmm, Ymm, Imm |
5387/// +---+--------------------+
5388/// ```
5389pub trait Vinserti32x8MaskEmitter<A, B, C, D> {
5390    fn vinserti32x8_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
5391}
5392
5393impl<'a> Vinserti32x8MaskEmitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5394    fn vinserti32x8_mask(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5395        self.emit(
5396            VINSERTI32X8_512RRRI_MASK,
5397            op0.as_operand(),
5398            op1.as_operand(),
5399            op2.as_operand(),
5400            op3.as_operand(),
5401        );
5402    }
5403}
5404
5405impl<'a> Vinserti32x8MaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5406    fn vinserti32x8_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5407        self.emit(
5408            VINSERTI32X8_512RRMI_MASK,
5409            op0.as_operand(),
5410            op1.as_operand(),
5411            op2.as_operand(),
5412            op3.as_operand(),
5413        );
5414    }
5415}
5416
5417/// `VINSERTI32X8_MASKZ`.
5418///
5419/// Supported operand variants:
5420///
5421/// ```text
5422/// +---+--------------------+
5423/// | # | Operands           |
5424/// +---+--------------------+
5425/// | 1 | Zmm, Zmm, Mem, Imm |
5426/// | 2 | Zmm, Zmm, Ymm, Imm |
5427/// +---+--------------------+
5428/// ```
5429pub trait Vinserti32x8MaskzEmitter<A, B, C, D> {
5430    fn vinserti32x8_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
5431}
5432
5433impl<'a> Vinserti32x8MaskzEmitter<Zmm, Zmm, Ymm, Imm> for Assembler<'a> {
5434    fn vinserti32x8_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Ymm, op3: Imm) {
5435        self.emit(
5436            VINSERTI32X8_512RRRI_MASKZ,
5437            op0.as_operand(),
5438            op1.as_operand(),
5439            op2.as_operand(),
5440            op3.as_operand(),
5441        );
5442    }
5443}
5444
5445impl<'a> Vinserti32x8MaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5446    fn vinserti32x8_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5447        self.emit(
5448            VINSERTI32X8_512RRMI_MASKZ,
5449            op0.as_operand(),
5450            op1.as_operand(),
5451            op2.as_operand(),
5452            op3.as_operand(),
5453        );
5454    }
5455}
5456
5457/// `VINSERTI64X2`.
5458///
5459/// Supported operand variants:
5460///
5461/// ```text
5462/// +---+--------------------+
5463/// | # | Operands           |
5464/// +---+--------------------+
5465/// | 1 | Ymm, Ymm, Mem, Imm |
5466/// | 2 | Ymm, Ymm, Xmm, Imm |
5467/// | 3 | Zmm, Zmm, Mem, Imm |
5468/// | 4 | Zmm, Zmm, Xmm, Imm |
5469/// +---+--------------------+
5470/// ```
5471pub trait Vinserti64x2Emitter<A, B, C, D> {
5472    fn vinserti64x2(&mut self, op0: A, op1: B, op2: C, op3: D);
5473}
5474
5475impl<'a> Vinserti64x2Emitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5476    fn vinserti64x2(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5477        self.emit(
5478            VINSERTI64X2_256RRRI,
5479            op0.as_operand(),
5480            op1.as_operand(),
5481            op2.as_operand(),
5482            op3.as_operand(),
5483        );
5484    }
5485}
5486
5487impl<'a> Vinserti64x2Emitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5488    fn vinserti64x2(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5489        self.emit(
5490            VINSERTI64X2_256RRMI,
5491            op0.as_operand(),
5492            op1.as_operand(),
5493            op2.as_operand(),
5494            op3.as_operand(),
5495        );
5496    }
5497}
5498
5499impl<'a> Vinserti64x2Emitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5500    fn vinserti64x2(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5501        self.emit(
5502            VINSERTI64X2_512RRRI,
5503            op0.as_operand(),
5504            op1.as_operand(),
5505            op2.as_operand(),
5506            op3.as_operand(),
5507        );
5508    }
5509}
5510
5511impl<'a> Vinserti64x2Emitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5512    fn vinserti64x2(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5513        self.emit(
5514            VINSERTI64X2_512RRMI,
5515            op0.as_operand(),
5516            op1.as_operand(),
5517            op2.as_operand(),
5518            op3.as_operand(),
5519        );
5520    }
5521}
5522
5523/// `VINSERTI64X2_MASK`.
5524///
5525/// Supported operand variants:
5526///
5527/// ```text
5528/// +---+--------------------+
5529/// | # | Operands           |
5530/// +---+--------------------+
5531/// | 1 | Ymm, Ymm, Mem, Imm |
5532/// | 2 | Ymm, Ymm, Xmm, Imm |
5533/// | 3 | Zmm, Zmm, Mem, Imm |
5534/// | 4 | Zmm, Zmm, Xmm, Imm |
5535/// +---+--------------------+
5536/// ```
5537pub trait Vinserti64x2MaskEmitter<A, B, C, D> {
5538    fn vinserti64x2_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
5539}
5540
5541impl<'a> Vinserti64x2MaskEmitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5542    fn vinserti64x2_mask(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5543        self.emit(
5544            VINSERTI64X2_256RRRI_MASK,
5545            op0.as_operand(),
5546            op1.as_operand(),
5547            op2.as_operand(),
5548            op3.as_operand(),
5549        );
5550    }
5551}
5552
5553impl<'a> Vinserti64x2MaskEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5554    fn vinserti64x2_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5555        self.emit(
5556            VINSERTI64X2_256RRMI_MASK,
5557            op0.as_operand(),
5558            op1.as_operand(),
5559            op2.as_operand(),
5560            op3.as_operand(),
5561        );
5562    }
5563}
5564
5565impl<'a> Vinserti64x2MaskEmitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5566    fn vinserti64x2_mask(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5567        self.emit(
5568            VINSERTI64X2_512RRRI_MASK,
5569            op0.as_operand(),
5570            op1.as_operand(),
5571            op2.as_operand(),
5572            op3.as_operand(),
5573        );
5574    }
5575}
5576
5577impl<'a> Vinserti64x2MaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5578    fn vinserti64x2_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5579        self.emit(
5580            VINSERTI64X2_512RRMI_MASK,
5581            op0.as_operand(),
5582            op1.as_operand(),
5583            op2.as_operand(),
5584            op3.as_operand(),
5585        );
5586    }
5587}
5588
5589/// `VINSERTI64X2_MASKZ`.
5590///
5591/// Supported operand variants:
5592///
5593/// ```text
5594/// +---+--------------------+
5595/// | # | Operands           |
5596/// +---+--------------------+
5597/// | 1 | Ymm, Ymm, Mem, Imm |
5598/// | 2 | Ymm, Ymm, Xmm, Imm |
5599/// | 3 | Zmm, Zmm, Mem, Imm |
5600/// | 4 | Zmm, Zmm, Xmm, Imm |
5601/// +---+--------------------+
5602/// ```
5603pub trait Vinserti64x2MaskzEmitter<A, B, C, D> {
5604    fn vinserti64x2_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
5605}
5606
5607impl<'a> Vinserti64x2MaskzEmitter<Ymm, Ymm, Xmm, Imm> for Assembler<'a> {
5608    fn vinserti64x2_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Xmm, op3: Imm) {
5609        self.emit(
5610            VINSERTI64X2_256RRRI_MASKZ,
5611            op0.as_operand(),
5612            op1.as_operand(),
5613            op2.as_operand(),
5614            op3.as_operand(),
5615        );
5616    }
5617}
5618
5619impl<'a> Vinserti64x2MaskzEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
5620    fn vinserti64x2_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
5621        self.emit(
5622            VINSERTI64X2_256RRMI_MASKZ,
5623            op0.as_operand(),
5624            op1.as_operand(),
5625            op2.as_operand(),
5626            op3.as_operand(),
5627        );
5628    }
5629}
5630
5631impl<'a> Vinserti64x2MaskzEmitter<Zmm, Zmm, Xmm, Imm> for Assembler<'a> {
5632    fn vinserti64x2_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Xmm, op3: Imm) {
5633        self.emit(
5634            VINSERTI64X2_512RRRI_MASKZ,
5635            op0.as_operand(),
5636            op1.as_operand(),
5637            op2.as_operand(),
5638            op3.as_operand(),
5639        );
5640    }
5641}
5642
5643impl<'a> Vinserti64x2MaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
5644    fn vinserti64x2_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
5645        self.emit(
5646            VINSERTI64X2_512RRMI_MASKZ,
5647            op0.as_operand(),
5648            op1.as_operand(),
5649            op2.as_operand(),
5650            op3.as_operand(),
5651        );
5652    }
5653}
5654
5655/// `VORPD`.
5656///
5657/// Supported operand variants:
5658///
5659/// ```text
5660/// +---+---------------+
5661/// | # | Operands      |
5662/// +---+---------------+
5663/// | 1 | Xmm, Xmm, Mem |
5664/// | 2 | Xmm, Xmm, Xmm |
5665/// | 3 | Ymm, Ymm, Mem |
5666/// | 4 | Ymm, Ymm, Ymm |
5667/// | 5 | Zmm, Zmm, Mem |
5668/// | 6 | Zmm, Zmm, Zmm |
5669/// +---+---------------+
5670/// ```
5671pub trait VorpdEmitter<A, B, C> {
5672    fn vorpd(&mut self, op0: A, op1: B, op2: C);
5673}
5674
5675impl<'a> VorpdEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
5676    fn vorpd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
5677        self.emit(
5678            VORPD128RRR,
5679            op0.as_operand(),
5680            op1.as_operand(),
5681            op2.as_operand(),
5682            &NOREG,
5683        );
5684    }
5685}
5686
5687impl<'a> VorpdEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
5688    fn vorpd(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
5689        self.emit(
5690            VORPD128RRM,
5691            op0.as_operand(),
5692            op1.as_operand(),
5693            op2.as_operand(),
5694            &NOREG,
5695        );
5696    }
5697}
5698
5699impl<'a> VorpdEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
5700    fn vorpd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
5701        self.emit(
5702            VORPD256RRR,
5703            op0.as_operand(),
5704            op1.as_operand(),
5705            op2.as_operand(),
5706            &NOREG,
5707        );
5708    }
5709}
5710
5711impl<'a> VorpdEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
5712    fn vorpd(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
5713        self.emit(
5714            VORPD256RRM,
5715            op0.as_operand(),
5716            op1.as_operand(),
5717            op2.as_operand(),
5718            &NOREG,
5719        );
5720    }
5721}
5722
5723impl<'a> VorpdEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
5724    fn vorpd(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
5725        self.emit(
5726            VORPD512RRR,
5727            op0.as_operand(),
5728            op1.as_operand(),
5729            op2.as_operand(),
5730            &NOREG,
5731        );
5732    }
5733}
5734
5735impl<'a> VorpdEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
5736    fn vorpd(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
5737        self.emit(
5738            VORPD512RRM,
5739            op0.as_operand(),
5740            op1.as_operand(),
5741            op2.as_operand(),
5742            &NOREG,
5743        );
5744    }
5745}
5746
5747/// `VORPD_MASK`.
5748///
5749/// Supported operand variants:
5750///
5751/// ```text
5752/// +---+---------------+
5753/// | # | Operands      |
5754/// +---+---------------+
5755/// | 1 | Xmm, Xmm, Mem |
5756/// | 2 | Xmm, Xmm, Xmm |
5757/// | 3 | Ymm, Ymm, Mem |
5758/// | 4 | Ymm, Ymm, Ymm |
5759/// | 5 | Zmm, Zmm, Mem |
5760/// | 6 | Zmm, Zmm, Zmm |
5761/// +---+---------------+
5762/// ```
5763pub trait VorpdMaskEmitter<A, B, C> {
5764    fn vorpd_mask(&mut self, op0: A, op1: B, op2: C);
5765}
5766
5767impl<'a> VorpdMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
5768    fn vorpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
5769        self.emit(
5770            VORPD128RRR_MASK,
5771            op0.as_operand(),
5772            op1.as_operand(),
5773            op2.as_operand(),
5774            &NOREG,
5775        );
5776    }
5777}
5778
5779impl<'a> VorpdMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
5780    fn vorpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
5781        self.emit(
5782            VORPD128RRM_MASK,
5783            op0.as_operand(),
5784            op1.as_operand(),
5785            op2.as_operand(),
5786            &NOREG,
5787        );
5788    }
5789}
5790
5791impl<'a> VorpdMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
5792    fn vorpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
5793        self.emit(
5794            VORPD256RRR_MASK,
5795            op0.as_operand(),
5796            op1.as_operand(),
5797            op2.as_operand(),
5798            &NOREG,
5799        );
5800    }
5801}
5802
5803impl<'a> VorpdMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
5804    fn vorpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
5805        self.emit(
5806            VORPD256RRM_MASK,
5807            op0.as_operand(),
5808            op1.as_operand(),
5809            op2.as_operand(),
5810            &NOREG,
5811        );
5812    }
5813}
5814
5815impl<'a> VorpdMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
5816    fn vorpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
5817        self.emit(
5818            VORPD512RRR_MASK,
5819            op0.as_operand(),
5820            op1.as_operand(),
5821            op2.as_operand(),
5822            &NOREG,
5823        );
5824    }
5825}
5826
5827impl<'a> VorpdMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
5828    fn vorpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
5829        self.emit(
5830            VORPD512RRM_MASK,
5831            op0.as_operand(),
5832            op1.as_operand(),
5833            op2.as_operand(),
5834            &NOREG,
5835        );
5836    }
5837}
5838
5839/// `VORPD_MASKZ`.
5840///
5841/// Supported operand variants:
5842///
5843/// ```text
5844/// +---+---------------+
5845/// | # | Operands      |
5846/// +---+---------------+
5847/// | 1 | Xmm, Xmm, Mem |
5848/// | 2 | Xmm, Xmm, Xmm |
5849/// | 3 | Ymm, Ymm, Mem |
5850/// | 4 | Ymm, Ymm, Ymm |
5851/// | 5 | Zmm, Zmm, Mem |
5852/// | 6 | Zmm, Zmm, Zmm |
5853/// +---+---------------+
5854/// ```
5855pub trait VorpdMaskzEmitter<A, B, C> {
5856    fn vorpd_maskz(&mut self, op0: A, op1: B, op2: C);
5857}
5858
5859impl<'a> VorpdMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
5860    fn vorpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
5861        self.emit(
5862            VORPD128RRR_MASKZ,
5863            op0.as_operand(),
5864            op1.as_operand(),
5865            op2.as_operand(),
5866            &NOREG,
5867        );
5868    }
5869}
5870
5871impl<'a> VorpdMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
5872    fn vorpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
5873        self.emit(
5874            VORPD128RRM_MASKZ,
5875            op0.as_operand(),
5876            op1.as_operand(),
5877            op2.as_operand(),
5878            &NOREG,
5879        );
5880    }
5881}
5882
5883impl<'a> VorpdMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
5884    fn vorpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
5885        self.emit(
5886            VORPD256RRR_MASKZ,
5887            op0.as_operand(),
5888            op1.as_operand(),
5889            op2.as_operand(),
5890            &NOREG,
5891        );
5892    }
5893}
5894
5895impl<'a> VorpdMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
5896    fn vorpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
5897        self.emit(
5898            VORPD256RRM_MASKZ,
5899            op0.as_operand(),
5900            op1.as_operand(),
5901            op2.as_operand(),
5902            &NOREG,
5903        );
5904    }
5905}
5906
5907impl<'a> VorpdMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
5908    fn vorpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
5909        self.emit(
5910            VORPD512RRR_MASKZ,
5911            op0.as_operand(),
5912            op1.as_operand(),
5913            op2.as_operand(),
5914            &NOREG,
5915        );
5916    }
5917}
5918
5919impl<'a> VorpdMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
5920    fn vorpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
5921        self.emit(
5922            VORPD512RRM_MASKZ,
5923            op0.as_operand(),
5924            op1.as_operand(),
5925            op2.as_operand(),
5926            &NOREG,
5927        );
5928    }
5929}
5930
5931/// `VORPS`.
5932///
5933/// Supported operand variants:
5934///
5935/// ```text
5936/// +---+---------------+
5937/// | # | Operands      |
5938/// +---+---------------+
5939/// | 1 | Xmm, Xmm, Mem |
5940/// | 2 | Xmm, Xmm, Xmm |
5941/// | 3 | Ymm, Ymm, Mem |
5942/// | 4 | Ymm, Ymm, Ymm |
5943/// | 5 | Zmm, Zmm, Mem |
5944/// | 6 | Zmm, Zmm, Zmm |
5945/// +---+---------------+
5946/// ```
5947pub trait VorpsEmitter<A, B, C> {
5948    fn vorps(&mut self, op0: A, op1: B, op2: C);
5949}
5950
5951impl<'a> VorpsEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
5952    fn vorps(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
5953        self.emit(
5954            VORPS128RRR,
5955            op0.as_operand(),
5956            op1.as_operand(),
5957            op2.as_operand(),
5958            &NOREG,
5959        );
5960    }
5961}
5962
5963impl<'a> VorpsEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
5964    fn vorps(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
5965        self.emit(
5966            VORPS128RRM,
5967            op0.as_operand(),
5968            op1.as_operand(),
5969            op2.as_operand(),
5970            &NOREG,
5971        );
5972    }
5973}
5974
5975impl<'a> VorpsEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
5976    fn vorps(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
5977        self.emit(
5978            VORPS256RRR,
5979            op0.as_operand(),
5980            op1.as_operand(),
5981            op2.as_operand(),
5982            &NOREG,
5983        );
5984    }
5985}
5986
5987impl<'a> VorpsEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
5988    fn vorps(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
5989        self.emit(
5990            VORPS256RRM,
5991            op0.as_operand(),
5992            op1.as_operand(),
5993            op2.as_operand(),
5994            &NOREG,
5995        );
5996    }
5997}
5998
5999impl<'a> VorpsEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6000    fn vorps(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6001        self.emit(
6002            VORPS512RRR,
6003            op0.as_operand(),
6004            op1.as_operand(),
6005            op2.as_operand(),
6006            &NOREG,
6007        );
6008    }
6009}
6010
6011impl<'a> VorpsEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6012    fn vorps(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6013        self.emit(
6014            VORPS512RRM,
6015            op0.as_operand(),
6016            op1.as_operand(),
6017            op2.as_operand(),
6018            &NOREG,
6019        );
6020    }
6021}
6022
6023/// `VORPS_MASK`.
6024///
6025/// Supported operand variants:
6026///
6027/// ```text
6028/// +---+---------------+
6029/// | # | Operands      |
6030/// +---+---------------+
6031/// | 1 | Xmm, Xmm, Mem |
6032/// | 2 | Xmm, Xmm, Xmm |
6033/// | 3 | Ymm, Ymm, Mem |
6034/// | 4 | Ymm, Ymm, Ymm |
6035/// | 5 | Zmm, Zmm, Mem |
6036/// | 6 | Zmm, Zmm, Zmm |
6037/// +---+---------------+
6038/// ```
6039pub trait VorpsMaskEmitter<A, B, C> {
6040    fn vorps_mask(&mut self, op0: A, op1: B, op2: C);
6041}
6042
6043impl<'a> VorpsMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6044    fn vorps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6045        self.emit(
6046            VORPS128RRR_MASK,
6047            op0.as_operand(),
6048            op1.as_operand(),
6049            op2.as_operand(),
6050            &NOREG,
6051        );
6052    }
6053}
6054
6055impl<'a> VorpsMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6056    fn vorps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6057        self.emit(
6058            VORPS128RRM_MASK,
6059            op0.as_operand(),
6060            op1.as_operand(),
6061            op2.as_operand(),
6062            &NOREG,
6063        );
6064    }
6065}
6066
6067impl<'a> VorpsMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6068    fn vorps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6069        self.emit(
6070            VORPS256RRR_MASK,
6071            op0.as_operand(),
6072            op1.as_operand(),
6073            op2.as_operand(),
6074            &NOREG,
6075        );
6076    }
6077}
6078
6079impl<'a> VorpsMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6080    fn vorps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6081        self.emit(
6082            VORPS256RRM_MASK,
6083            op0.as_operand(),
6084            op1.as_operand(),
6085            op2.as_operand(),
6086            &NOREG,
6087        );
6088    }
6089}
6090
6091impl<'a> VorpsMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6092    fn vorps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6093        self.emit(
6094            VORPS512RRR_MASK,
6095            op0.as_operand(),
6096            op1.as_operand(),
6097            op2.as_operand(),
6098            &NOREG,
6099        );
6100    }
6101}
6102
6103impl<'a> VorpsMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6104    fn vorps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6105        self.emit(
6106            VORPS512RRM_MASK,
6107            op0.as_operand(),
6108            op1.as_operand(),
6109            op2.as_operand(),
6110            &NOREG,
6111        );
6112    }
6113}
6114
6115/// `VORPS_MASKZ`.
6116///
6117/// Supported operand variants:
6118///
6119/// ```text
6120/// +---+---------------+
6121/// | # | Operands      |
6122/// +---+---------------+
6123/// | 1 | Xmm, Xmm, Mem |
6124/// | 2 | Xmm, Xmm, Xmm |
6125/// | 3 | Ymm, Ymm, Mem |
6126/// | 4 | Ymm, Ymm, Ymm |
6127/// | 5 | Zmm, Zmm, Mem |
6128/// | 6 | Zmm, Zmm, Zmm |
6129/// +---+---------------+
6130/// ```
6131pub trait VorpsMaskzEmitter<A, B, C> {
6132    fn vorps_maskz(&mut self, op0: A, op1: B, op2: C);
6133}
6134
6135impl<'a> VorpsMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6136    fn vorps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6137        self.emit(
6138            VORPS128RRR_MASKZ,
6139            op0.as_operand(),
6140            op1.as_operand(),
6141            op2.as_operand(),
6142            &NOREG,
6143        );
6144    }
6145}
6146
6147impl<'a> VorpsMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6148    fn vorps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6149        self.emit(
6150            VORPS128RRM_MASKZ,
6151            op0.as_operand(),
6152            op1.as_operand(),
6153            op2.as_operand(),
6154            &NOREG,
6155        );
6156    }
6157}
6158
6159impl<'a> VorpsMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6160    fn vorps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6161        self.emit(
6162            VORPS256RRR_MASKZ,
6163            op0.as_operand(),
6164            op1.as_operand(),
6165            op2.as_operand(),
6166            &NOREG,
6167        );
6168    }
6169}
6170
6171impl<'a> VorpsMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6172    fn vorps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6173        self.emit(
6174            VORPS256RRM_MASKZ,
6175            op0.as_operand(),
6176            op1.as_operand(),
6177            op2.as_operand(),
6178            &NOREG,
6179        );
6180    }
6181}
6182
6183impl<'a> VorpsMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6184    fn vorps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6185        self.emit(
6186            VORPS512RRR_MASKZ,
6187            op0.as_operand(),
6188            op1.as_operand(),
6189            op2.as_operand(),
6190            &NOREG,
6191        );
6192    }
6193}
6194
6195impl<'a> VorpsMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6196    fn vorps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6197        self.emit(
6198            VORPS512RRM_MASKZ,
6199            op0.as_operand(),
6200            op1.as_operand(),
6201            op2.as_operand(),
6202            &NOREG,
6203        );
6204    }
6205}
6206
6207/// `VPMOVD2M`.
6208///
6209/// Supported operand variants:
6210///
6211/// ```text
6212/// +---+-----------+
6213/// | # | Operands  |
6214/// +---+-----------+
6215/// | 1 | KReg, Xmm |
6216/// | 2 | KReg, Ymm |
6217/// | 3 | KReg, Zmm |
6218/// +---+-----------+
6219/// ```
6220pub trait Vpmovd2mEmitter<A, B> {
6221    fn vpmovd2m(&mut self, op0: A, op1: B);
6222}
6223
6224impl<'a> Vpmovd2mEmitter<KReg, Xmm> for Assembler<'a> {
6225    fn vpmovd2m(&mut self, op0: KReg, op1: Xmm) {
6226        self.emit(
6227            VPMOVD2M128KR,
6228            op0.as_operand(),
6229            op1.as_operand(),
6230            &NOREG,
6231            &NOREG,
6232        );
6233    }
6234}
6235
6236impl<'a> Vpmovd2mEmitter<KReg, Ymm> for Assembler<'a> {
6237    fn vpmovd2m(&mut self, op0: KReg, op1: Ymm) {
6238        self.emit(
6239            VPMOVD2M256KR,
6240            op0.as_operand(),
6241            op1.as_operand(),
6242            &NOREG,
6243            &NOREG,
6244        );
6245    }
6246}
6247
6248impl<'a> Vpmovd2mEmitter<KReg, Zmm> for Assembler<'a> {
6249    fn vpmovd2m(&mut self, op0: KReg, op1: Zmm) {
6250        self.emit(
6251            VPMOVD2M512KR,
6252            op0.as_operand(),
6253            op1.as_operand(),
6254            &NOREG,
6255            &NOREG,
6256        );
6257    }
6258}
6259
6260/// `VPMOVM2D`.
6261///
6262/// Supported operand variants:
6263///
6264/// ```text
6265/// +---+-----------+
6266/// | # | Operands  |
6267/// +---+-----------+
6268/// | 1 | Xmm, KReg |
6269/// | 2 | Ymm, KReg |
6270/// | 3 | Zmm, KReg |
6271/// +---+-----------+
6272/// ```
6273pub trait Vpmovm2dEmitter<A, B> {
6274    fn vpmovm2d(&mut self, op0: A, op1: B);
6275}
6276
6277impl<'a> Vpmovm2dEmitter<Xmm, KReg> for Assembler<'a> {
6278    fn vpmovm2d(&mut self, op0: Xmm, op1: KReg) {
6279        self.emit(
6280            VPMOVM2D128RK,
6281            op0.as_operand(),
6282            op1.as_operand(),
6283            &NOREG,
6284            &NOREG,
6285        );
6286    }
6287}
6288
6289impl<'a> Vpmovm2dEmitter<Ymm, KReg> for Assembler<'a> {
6290    fn vpmovm2d(&mut self, op0: Ymm, op1: KReg) {
6291        self.emit(
6292            VPMOVM2D256RK,
6293            op0.as_operand(),
6294            op1.as_operand(),
6295            &NOREG,
6296            &NOREG,
6297        );
6298    }
6299}
6300
6301impl<'a> Vpmovm2dEmitter<Zmm, KReg> for Assembler<'a> {
6302    fn vpmovm2d(&mut self, op0: Zmm, op1: KReg) {
6303        self.emit(
6304            VPMOVM2D512RK,
6305            op0.as_operand(),
6306            op1.as_operand(),
6307            &NOREG,
6308            &NOREG,
6309        );
6310    }
6311}
6312
6313/// `VPMOVM2Q`.
6314///
6315/// Supported operand variants:
6316///
6317/// ```text
6318/// +---+-----------+
6319/// | # | Operands  |
6320/// +---+-----------+
6321/// | 1 | Xmm, KReg |
6322/// | 2 | Ymm, KReg |
6323/// | 3 | Zmm, KReg |
6324/// +---+-----------+
6325/// ```
6326pub trait Vpmovm2qEmitter<A, B> {
6327    fn vpmovm2q(&mut self, op0: A, op1: B);
6328}
6329
6330impl<'a> Vpmovm2qEmitter<Xmm, KReg> for Assembler<'a> {
6331    fn vpmovm2q(&mut self, op0: Xmm, op1: KReg) {
6332        self.emit(
6333            VPMOVM2Q128RK,
6334            op0.as_operand(),
6335            op1.as_operand(),
6336            &NOREG,
6337            &NOREG,
6338        );
6339    }
6340}
6341
6342impl<'a> Vpmovm2qEmitter<Ymm, KReg> for Assembler<'a> {
6343    fn vpmovm2q(&mut self, op0: Ymm, op1: KReg) {
6344        self.emit(
6345            VPMOVM2Q256RK,
6346            op0.as_operand(),
6347            op1.as_operand(),
6348            &NOREG,
6349            &NOREG,
6350        );
6351    }
6352}
6353
6354impl<'a> Vpmovm2qEmitter<Zmm, KReg> for Assembler<'a> {
6355    fn vpmovm2q(&mut self, op0: Zmm, op1: KReg) {
6356        self.emit(
6357            VPMOVM2Q512RK,
6358            op0.as_operand(),
6359            op1.as_operand(),
6360            &NOREG,
6361            &NOREG,
6362        );
6363    }
6364}
6365
6366/// `VPMOVQ2M`.
6367///
6368/// Supported operand variants:
6369///
6370/// ```text
6371/// +---+-----------+
6372/// | # | Operands  |
6373/// +---+-----------+
6374/// | 1 | KReg, Xmm |
6375/// | 2 | KReg, Ymm |
6376/// | 3 | KReg, Zmm |
6377/// +---+-----------+
6378/// ```
6379pub trait Vpmovq2mEmitter<A, B> {
6380    fn vpmovq2m(&mut self, op0: A, op1: B);
6381}
6382
6383impl<'a> Vpmovq2mEmitter<KReg, Xmm> for Assembler<'a> {
6384    fn vpmovq2m(&mut self, op0: KReg, op1: Xmm) {
6385        self.emit(
6386            VPMOVQ2M128KR,
6387            op0.as_operand(),
6388            op1.as_operand(),
6389            &NOREG,
6390            &NOREG,
6391        );
6392    }
6393}
6394
6395impl<'a> Vpmovq2mEmitter<KReg, Ymm> for Assembler<'a> {
6396    fn vpmovq2m(&mut self, op0: KReg, op1: Ymm) {
6397        self.emit(
6398            VPMOVQ2M256KR,
6399            op0.as_operand(),
6400            op1.as_operand(),
6401            &NOREG,
6402            &NOREG,
6403        );
6404    }
6405}
6406
6407impl<'a> Vpmovq2mEmitter<KReg, Zmm> for Assembler<'a> {
6408    fn vpmovq2m(&mut self, op0: KReg, op1: Zmm) {
6409        self.emit(
6410            VPMOVQ2M512KR,
6411            op0.as_operand(),
6412            op1.as_operand(),
6413            &NOREG,
6414            &NOREG,
6415        );
6416    }
6417}
6418
6419/// `VPMULLD`.
6420///
6421/// Supported operand variants:
6422///
6423/// ```text
6424/// +---+---------------+
6425/// | # | Operands      |
6426/// +---+---------------+
6427/// | 1 | Xmm, Xmm, Mem |
6428/// | 2 | Xmm, Xmm, Xmm |
6429/// | 3 | Ymm, Ymm, Mem |
6430/// | 4 | Ymm, Ymm, Ymm |
6431/// | 5 | Zmm, Zmm, Mem |
6432/// | 6 | Zmm, Zmm, Zmm |
6433/// +---+---------------+
6434/// ```
6435pub trait VpmulldEmitter<A, B, C> {
6436    fn vpmulld(&mut self, op0: A, op1: B, op2: C);
6437}
6438
6439impl<'a> VpmulldEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6440    fn vpmulld(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6441        self.emit(
6442            VPMULLD128RRR,
6443            op0.as_operand(),
6444            op1.as_operand(),
6445            op2.as_operand(),
6446            &NOREG,
6447        );
6448    }
6449}
6450
6451impl<'a> VpmulldEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6452    fn vpmulld(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6453        self.emit(
6454            VPMULLD128RRM,
6455            op0.as_operand(),
6456            op1.as_operand(),
6457            op2.as_operand(),
6458            &NOREG,
6459        );
6460    }
6461}
6462
6463impl<'a> VpmulldEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6464    fn vpmulld(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6465        self.emit(
6466            VPMULLD256RRR,
6467            op0.as_operand(),
6468            op1.as_operand(),
6469            op2.as_operand(),
6470            &NOREG,
6471        );
6472    }
6473}
6474
6475impl<'a> VpmulldEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6476    fn vpmulld(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6477        self.emit(
6478            VPMULLD256RRM,
6479            op0.as_operand(),
6480            op1.as_operand(),
6481            op2.as_operand(),
6482            &NOREG,
6483        );
6484    }
6485}
6486
6487impl<'a> VpmulldEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6488    fn vpmulld(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6489        self.emit(
6490            VPMULLD512RRR,
6491            op0.as_operand(),
6492            op1.as_operand(),
6493            op2.as_operand(),
6494            &NOREG,
6495        );
6496    }
6497}
6498
6499impl<'a> VpmulldEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6500    fn vpmulld(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6501        self.emit(
6502            VPMULLD512RRM,
6503            op0.as_operand(),
6504            op1.as_operand(),
6505            op2.as_operand(),
6506            &NOREG,
6507        );
6508    }
6509}
6510
6511/// `VPMULLD_MASK`.
6512///
6513/// Supported operand variants:
6514///
6515/// ```text
6516/// +---+---------------+
6517/// | # | Operands      |
6518/// +---+---------------+
6519/// | 1 | Xmm, Xmm, Mem |
6520/// | 2 | Xmm, Xmm, Xmm |
6521/// | 3 | Ymm, Ymm, Mem |
6522/// | 4 | Ymm, Ymm, Ymm |
6523/// | 5 | Zmm, Zmm, Mem |
6524/// | 6 | Zmm, Zmm, Zmm |
6525/// +---+---------------+
6526/// ```
6527pub trait VpmulldMaskEmitter<A, B, C> {
6528    fn vpmulld_mask(&mut self, op0: A, op1: B, op2: C);
6529}
6530
6531impl<'a> VpmulldMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6532    fn vpmulld_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6533        self.emit(
6534            VPMULLD128RRR_MASK,
6535            op0.as_operand(),
6536            op1.as_operand(),
6537            op2.as_operand(),
6538            &NOREG,
6539        );
6540    }
6541}
6542
6543impl<'a> VpmulldMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6544    fn vpmulld_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6545        self.emit(
6546            VPMULLD128RRM_MASK,
6547            op0.as_operand(),
6548            op1.as_operand(),
6549            op2.as_operand(),
6550            &NOREG,
6551        );
6552    }
6553}
6554
6555impl<'a> VpmulldMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6556    fn vpmulld_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6557        self.emit(
6558            VPMULLD256RRR_MASK,
6559            op0.as_operand(),
6560            op1.as_operand(),
6561            op2.as_operand(),
6562            &NOREG,
6563        );
6564    }
6565}
6566
6567impl<'a> VpmulldMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6568    fn vpmulld_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6569        self.emit(
6570            VPMULLD256RRM_MASK,
6571            op0.as_operand(),
6572            op1.as_operand(),
6573            op2.as_operand(),
6574            &NOREG,
6575        );
6576    }
6577}
6578
6579impl<'a> VpmulldMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6580    fn vpmulld_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6581        self.emit(
6582            VPMULLD512RRR_MASK,
6583            op0.as_operand(),
6584            op1.as_operand(),
6585            op2.as_operand(),
6586            &NOREG,
6587        );
6588    }
6589}
6590
6591impl<'a> VpmulldMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6592    fn vpmulld_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6593        self.emit(
6594            VPMULLD512RRM_MASK,
6595            op0.as_operand(),
6596            op1.as_operand(),
6597            op2.as_operand(),
6598            &NOREG,
6599        );
6600    }
6601}
6602
6603/// `VPMULLD_MASKZ`.
6604///
6605/// Supported operand variants:
6606///
6607/// ```text
6608/// +---+---------------+
6609/// | # | Operands      |
6610/// +---+---------------+
6611/// | 1 | Xmm, Xmm, Mem |
6612/// | 2 | Xmm, Xmm, Xmm |
6613/// | 3 | Ymm, Ymm, Mem |
6614/// | 4 | Ymm, Ymm, Ymm |
6615/// | 5 | Zmm, Zmm, Mem |
6616/// | 6 | Zmm, Zmm, Zmm |
6617/// +---+---------------+
6618/// ```
6619pub trait VpmulldMaskzEmitter<A, B, C> {
6620    fn vpmulld_maskz(&mut self, op0: A, op1: B, op2: C);
6621}
6622
6623impl<'a> VpmulldMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6624    fn vpmulld_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6625        self.emit(
6626            VPMULLD128RRR_MASKZ,
6627            op0.as_operand(),
6628            op1.as_operand(),
6629            op2.as_operand(),
6630            &NOREG,
6631        );
6632    }
6633}
6634
6635impl<'a> VpmulldMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6636    fn vpmulld_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6637        self.emit(
6638            VPMULLD128RRM_MASKZ,
6639            op0.as_operand(),
6640            op1.as_operand(),
6641            op2.as_operand(),
6642            &NOREG,
6643        );
6644    }
6645}
6646
6647impl<'a> VpmulldMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6648    fn vpmulld_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6649        self.emit(
6650            VPMULLD256RRR_MASKZ,
6651            op0.as_operand(),
6652            op1.as_operand(),
6653            op2.as_operand(),
6654            &NOREG,
6655        );
6656    }
6657}
6658
6659impl<'a> VpmulldMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6660    fn vpmulld_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6661        self.emit(
6662            VPMULLD256RRM_MASKZ,
6663            op0.as_operand(),
6664            op1.as_operand(),
6665            op2.as_operand(),
6666            &NOREG,
6667        );
6668    }
6669}
6670
6671impl<'a> VpmulldMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6672    fn vpmulld_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6673        self.emit(
6674            VPMULLD512RRR_MASKZ,
6675            op0.as_operand(),
6676            op1.as_operand(),
6677            op2.as_operand(),
6678            &NOREG,
6679        );
6680    }
6681}
6682
6683impl<'a> VpmulldMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6684    fn vpmulld_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6685        self.emit(
6686            VPMULLD512RRM_MASKZ,
6687            op0.as_operand(),
6688            op1.as_operand(),
6689            op2.as_operand(),
6690            &NOREG,
6691        );
6692    }
6693}
6694
6695/// `VPMULLQ`.
6696///
6697/// Supported operand variants:
6698///
6699/// ```text
6700/// +---+---------------+
6701/// | # | Operands      |
6702/// +---+---------------+
6703/// | 1 | Xmm, Xmm, Mem |
6704/// | 2 | Xmm, Xmm, Xmm |
6705/// | 3 | Ymm, Ymm, Mem |
6706/// | 4 | Ymm, Ymm, Ymm |
6707/// | 5 | Zmm, Zmm, Mem |
6708/// | 6 | Zmm, Zmm, Zmm |
6709/// +---+---------------+
6710/// ```
6711pub trait VpmullqEmitter<A, B, C> {
6712    fn vpmullq(&mut self, op0: A, op1: B, op2: C);
6713}
6714
6715impl<'a> VpmullqEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6716    fn vpmullq(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6717        self.emit(
6718            VPMULLQ128RRR,
6719            op0.as_operand(),
6720            op1.as_operand(),
6721            op2.as_operand(),
6722            &NOREG,
6723        );
6724    }
6725}
6726
6727impl<'a> VpmullqEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6728    fn vpmullq(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6729        self.emit(
6730            VPMULLQ128RRM,
6731            op0.as_operand(),
6732            op1.as_operand(),
6733            op2.as_operand(),
6734            &NOREG,
6735        );
6736    }
6737}
6738
6739impl<'a> VpmullqEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6740    fn vpmullq(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6741        self.emit(
6742            VPMULLQ256RRR,
6743            op0.as_operand(),
6744            op1.as_operand(),
6745            op2.as_operand(),
6746            &NOREG,
6747        );
6748    }
6749}
6750
6751impl<'a> VpmullqEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6752    fn vpmullq(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6753        self.emit(
6754            VPMULLQ256RRM,
6755            op0.as_operand(),
6756            op1.as_operand(),
6757            op2.as_operand(),
6758            &NOREG,
6759        );
6760    }
6761}
6762
6763impl<'a> VpmullqEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6764    fn vpmullq(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6765        self.emit(
6766            VPMULLQ512RRR,
6767            op0.as_operand(),
6768            op1.as_operand(),
6769            op2.as_operand(),
6770            &NOREG,
6771        );
6772    }
6773}
6774
6775impl<'a> VpmullqEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6776    fn vpmullq(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6777        self.emit(
6778            VPMULLQ512RRM,
6779            op0.as_operand(),
6780            op1.as_operand(),
6781            op2.as_operand(),
6782            &NOREG,
6783        );
6784    }
6785}
6786
6787/// `VPMULLQ_MASK`.
6788///
6789/// Supported operand variants:
6790///
6791/// ```text
6792/// +---+---------------+
6793/// | # | Operands      |
6794/// +---+---------------+
6795/// | 1 | Xmm, Xmm, Mem |
6796/// | 2 | Xmm, Xmm, Xmm |
6797/// | 3 | Ymm, Ymm, Mem |
6798/// | 4 | Ymm, Ymm, Ymm |
6799/// | 5 | Zmm, Zmm, Mem |
6800/// | 6 | Zmm, Zmm, Zmm |
6801/// +---+---------------+
6802/// ```
6803pub trait VpmullqMaskEmitter<A, B, C> {
6804    fn vpmullq_mask(&mut self, op0: A, op1: B, op2: C);
6805}
6806
6807impl<'a> VpmullqMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6808    fn vpmullq_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6809        self.emit(
6810            VPMULLQ128RRR_MASK,
6811            op0.as_operand(),
6812            op1.as_operand(),
6813            op2.as_operand(),
6814            &NOREG,
6815        );
6816    }
6817}
6818
6819impl<'a> VpmullqMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6820    fn vpmullq_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6821        self.emit(
6822            VPMULLQ128RRM_MASK,
6823            op0.as_operand(),
6824            op1.as_operand(),
6825            op2.as_operand(),
6826            &NOREG,
6827        );
6828    }
6829}
6830
6831impl<'a> VpmullqMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6832    fn vpmullq_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6833        self.emit(
6834            VPMULLQ256RRR_MASK,
6835            op0.as_operand(),
6836            op1.as_operand(),
6837            op2.as_operand(),
6838            &NOREG,
6839        );
6840    }
6841}
6842
6843impl<'a> VpmullqMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6844    fn vpmullq_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6845        self.emit(
6846            VPMULLQ256RRM_MASK,
6847            op0.as_operand(),
6848            op1.as_operand(),
6849            op2.as_operand(),
6850            &NOREG,
6851        );
6852    }
6853}
6854
6855impl<'a> VpmullqMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6856    fn vpmullq_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6857        self.emit(
6858            VPMULLQ512RRR_MASK,
6859            op0.as_operand(),
6860            op1.as_operand(),
6861            op2.as_operand(),
6862            &NOREG,
6863        );
6864    }
6865}
6866
6867impl<'a> VpmullqMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6868    fn vpmullq_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6869        self.emit(
6870            VPMULLQ512RRM_MASK,
6871            op0.as_operand(),
6872            op1.as_operand(),
6873            op2.as_operand(),
6874            &NOREG,
6875        );
6876    }
6877}
6878
6879/// `VPMULLQ_MASKZ`.
6880///
6881/// Supported operand variants:
6882///
6883/// ```text
6884/// +---+---------------+
6885/// | # | Operands      |
6886/// +---+---------------+
6887/// | 1 | Xmm, Xmm, Mem |
6888/// | 2 | Xmm, Xmm, Xmm |
6889/// | 3 | Ymm, Ymm, Mem |
6890/// | 4 | Ymm, Ymm, Ymm |
6891/// | 5 | Zmm, Zmm, Mem |
6892/// | 6 | Zmm, Zmm, Zmm |
6893/// +---+---------------+
6894/// ```
6895pub trait VpmullqMaskzEmitter<A, B, C> {
6896    fn vpmullq_maskz(&mut self, op0: A, op1: B, op2: C);
6897}
6898
6899impl<'a> VpmullqMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
6900    fn vpmullq_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
6901        self.emit(
6902            VPMULLQ128RRR_MASKZ,
6903            op0.as_operand(),
6904            op1.as_operand(),
6905            op2.as_operand(),
6906            &NOREG,
6907        );
6908    }
6909}
6910
6911impl<'a> VpmullqMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
6912    fn vpmullq_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
6913        self.emit(
6914            VPMULLQ128RRM_MASKZ,
6915            op0.as_operand(),
6916            op1.as_operand(),
6917            op2.as_operand(),
6918            &NOREG,
6919        );
6920    }
6921}
6922
6923impl<'a> VpmullqMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
6924    fn vpmullq_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
6925        self.emit(
6926            VPMULLQ256RRR_MASKZ,
6927            op0.as_operand(),
6928            op1.as_operand(),
6929            op2.as_operand(),
6930            &NOREG,
6931        );
6932    }
6933}
6934
6935impl<'a> VpmullqMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
6936    fn vpmullq_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
6937        self.emit(
6938            VPMULLQ256RRM_MASKZ,
6939            op0.as_operand(),
6940            op1.as_operand(),
6941            op2.as_operand(),
6942            &NOREG,
6943        );
6944    }
6945}
6946
6947impl<'a> VpmullqMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
6948    fn vpmullq_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
6949        self.emit(
6950            VPMULLQ512RRR_MASKZ,
6951            op0.as_operand(),
6952            op1.as_operand(),
6953            op2.as_operand(),
6954            &NOREG,
6955        );
6956    }
6957}
6958
6959impl<'a> VpmullqMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
6960    fn vpmullq_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
6961        self.emit(
6962            VPMULLQ512RRM_MASKZ,
6963            op0.as_operand(),
6964            op1.as_operand(),
6965            op2.as_operand(),
6966            &NOREG,
6967        );
6968    }
6969}
6970
6971/// `VRANGEPD`.
6972///
6973/// Supported operand variants:
6974///
6975/// ```text
6976/// +---+--------------------+
6977/// | # | Operands           |
6978/// +---+--------------------+
6979/// | 1 | Xmm, Xmm, Mem, Imm |
6980/// | 2 | Xmm, Xmm, Xmm, Imm |
6981/// | 3 | Ymm, Ymm, Mem, Imm |
6982/// | 4 | Ymm, Ymm, Ymm, Imm |
6983/// | 5 | Zmm, Zmm, Mem, Imm |
6984/// | 6 | Zmm, Zmm, Zmm, Imm |
6985/// +---+--------------------+
6986/// ```
6987pub trait VrangepdEmitter<A, B, C, D> {
6988    fn vrangepd(&mut self, op0: A, op1: B, op2: C, op3: D);
6989}
6990
6991impl<'a> VrangepdEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
6992    fn vrangepd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
6993        self.emit(
6994            VRANGEPD128RRRI,
6995            op0.as_operand(),
6996            op1.as_operand(),
6997            op2.as_operand(),
6998            op3.as_operand(),
6999        );
7000    }
7001}
7002
7003impl<'a> VrangepdEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7004    fn vrangepd(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7005        self.emit(
7006            VRANGEPD128RRMI,
7007            op0.as_operand(),
7008            op1.as_operand(),
7009            op2.as_operand(),
7010            op3.as_operand(),
7011        );
7012    }
7013}
7014
7015impl<'a> VrangepdEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7016    fn vrangepd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7017        self.emit(
7018            VRANGEPD256RRRI,
7019            op0.as_operand(),
7020            op1.as_operand(),
7021            op2.as_operand(),
7022            op3.as_operand(),
7023        );
7024    }
7025}
7026
7027impl<'a> VrangepdEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7028    fn vrangepd(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7029        self.emit(
7030            VRANGEPD256RRMI,
7031            op0.as_operand(),
7032            op1.as_operand(),
7033            op2.as_operand(),
7034            op3.as_operand(),
7035        );
7036    }
7037}
7038
7039impl<'a> VrangepdEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7040    fn vrangepd(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7041        self.emit(
7042            VRANGEPD512RRRI,
7043            op0.as_operand(),
7044            op1.as_operand(),
7045            op2.as_operand(),
7046            op3.as_operand(),
7047        );
7048    }
7049}
7050
7051impl<'a> VrangepdEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7052    fn vrangepd(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7053        self.emit(
7054            VRANGEPD512RRMI,
7055            op0.as_operand(),
7056            op1.as_operand(),
7057            op2.as_operand(),
7058            op3.as_operand(),
7059        );
7060    }
7061}
7062
7063/// `VRANGEPD_MASK`.
7064///
7065/// Supported operand variants:
7066///
7067/// ```text
7068/// +---+--------------------+
7069/// | # | Operands           |
7070/// +---+--------------------+
7071/// | 1 | Xmm, Xmm, Mem, Imm |
7072/// | 2 | Xmm, Xmm, Xmm, Imm |
7073/// | 3 | Ymm, Ymm, Mem, Imm |
7074/// | 4 | Ymm, Ymm, Ymm, Imm |
7075/// | 5 | Zmm, Zmm, Mem, Imm |
7076/// | 6 | Zmm, Zmm, Zmm, Imm |
7077/// +---+--------------------+
7078/// ```
7079pub trait VrangepdMaskEmitter<A, B, C, D> {
7080    fn vrangepd_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
7081}
7082
7083impl<'a> VrangepdMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7084    fn vrangepd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7085        self.emit(
7086            VRANGEPD128RRRI_MASK,
7087            op0.as_operand(),
7088            op1.as_operand(),
7089            op2.as_operand(),
7090            op3.as_operand(),
7091        );
7092    }
7093}
7094
7095impl<'a> VrangepdMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7096    fn vrangepd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7097        self.emit(
7098            VRANGEPD128RRMI_MASK,
7099            op0.as_operand(),
7100            op1.as_operand(),
7101            op2.as_operand(),
7102            op3.as_operand(),
7103        );
7104    }
7105}
7106
7107impl<'a> VrangepdMaskEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7108    fn vrangepd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7109        self.emit(
7110            VRANGEPD256RRRI_MASK,
7111            op0.as_operand(),
7112            op1.as_operand(),
7113            op2.as_operand(),
7114            op3.as_operand(),
7115        );
7116    }
7117}
7118
7119impl<'a> VrangepdMaskEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7120    fn vrangepd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7121        self.emit(
7122            VRANGEPD256RRMI_MASK,
7123            op0.as_operand(),
7124            op1.as_operand(),
7125            op2.as_operand(),
7126            op3.as_operand(),
7127        );
7128    }
7129}
7130
7131impl<'a> VrangepdMaskEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7132    fn vrangepd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7133        self.emit(
7134            VRANGEPD512RRRI_MASK,
7135            op0.as_operand(),
7136            op1.as_operand(),
7137            op2.as_operand(),
7138            op3.as_operand(),
7139        );
7140    }
7141}
7142
7143impl<'a> VrangepdMaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7144    fn vrangepd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7145        self.emit(
7146            VRANGEPD512RRMI_MASK,
7147            op0.as_operand(),
7148            op1.as_operand(),
7149            op2.as_operand(),
7150            op3.as_operand(),
7151        );
7152    }
7153}
7154
7155/// `VRANGEPD_MASK_SAE`.
7156///
7157/// Supported operand variants:
7158///
7159/// ```text
7160/// +---+--------------------+
7161/// | # | Operands           |
7162/// +---+--------------------+
7163/// | 1 | Zmm, Zmm, Zmm, Imm |
7164/// +---+--------------------+
7165/// ```
7166pub trait VrangepdMaskSaeEmitter<A, B, C, D> {
7167    fn vrangepd_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7168}
7169
7170impl<'a> VrangepdMaskSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7171    fn vrangepd_mask_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7172        self.emit(
7173            VRANGEPD512RRRI_MASK_SAE,
7174            op0.as_operand(),
7175            op1.as_operand(),
7176            op2.as_operand(),
7177            op3.as_operand(),
7178        );
7179    }
7180}
7181
7182/// `VRANGEPD_MASKZ`.
7183///
7184/// Supported operand variants:
7185///
7186/// ```text
7187/// +---+--------------------+
7188/// | # | Operands           |
7189/// +---+--------------------+
7190/// | 1 | Xmm, Xmm, Mem, Imm |
7191/// | 2 | Xmm, Xmm, Xmm, Imm |
7192/// | 3 | Ymm, Ymm, Mem, Imm |
7193/// | 4 | Ymm, Ymm, Ymm, Imm |
7194/// | 5 | Zmm, Zmm, Mem, Imm |
7195/// | 6 | Zmm, Zmm, Zmm, Imm |
7196/// +---+--------------------+
7197/// ```
7198pub trait VrangepdMaskzEmitter<A, B, C, D> {
7199    fn vrangepd_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
7200}
7201
7202impl<'a> VrangepdMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7203    fn vrangepd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7204        self.emit(
7205            VRANGEPD128RRRI_MASKZ,
7206            op0.as_operand(),
7207            op1.as_operand(),
7208            op2.as_operand(),
7209            op3.as_operand(),
7210        );
7211    }
7212}
7213
7214impl<'a> VrangepdMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7215    fn vrangepd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7216        self.emit(
7217            VRANGEPD128RRMI_MASKZ,
7218            op0.as_operand(),
7219            op1.as_operand(),
7220            op2.as_operand(),
7221            op3.as_operand(),
7222        );
7223    }
7224}
7225
7226impl<'a> VrangepdMaskzEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7227    fn vrangepd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7228        self.emit(
7229            VRANGEPD256RRRI_MASKZ,
7230            op0.as_operand(),
7231            op1.as_operand(),
7232            op2.as_operand(),
7233            op3.as_operand(),
7234        );
7235    }
7236}
7237
7238impl<'a> VrangepdMaskzEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7239    fn vrangepd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7240        self.emit(
7241            VRANGEPD256RRMI_MASKZ,
7242            op0.as_operand(),
7243            op1.as_operand(),
7244            op2.as_operand(),
7245            op3.as_operand(),
7246        );
7247    }
7248}
7249
7250impl<'a> VrangepdMaskzEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7251    fn vrangepd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7252        self.emit(
7253            VRANGEPD512RRRI_MASKZ,
7254            op0.as_operand(),
7255            op1.as_operand(),
7256            op2.as_operand(),
7257            op3.as_operand(),
7258        );
7259    }
7260}
7261
7262impl<'a> VrangepdMaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7263    fn vrangepd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7264        self.emit(
7265            VRANGEPD512RRMI_MASKZ,
7266            op0.as_operand(),
7267            op1.as_operand(),
7268            op2.as_operand(),
7269            op3.as_operand(),
7270        );
7271    }
7272}
7273
7274/// `VRANGEPD_MASKZ_SAE`.
7275///
7276/// Supported operand variants:
7277///
7278/// ```text
7279/// +---+--------------------+
7280/// | # | Operands           |
7281/// +---+--------------------+
7282/// | 1 | Zmm, Zmm, Zmm, Imm |
7283/// +---+--------------------+
7284/// ```
7285pub trait VrangepdMaskzSaeEmitter<A, B, C, D> {
7286    fn vrangepd_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7287}
7288
7289impl<'a> VrangepdMaskzSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7290    fn vrangepd_maskz_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7291        self.emit(
7292            VRANGEPD512RRRI_MASKZ_SAE,
7293            op0.as_operand(),
7294            op1.as_operand(),
7295            op2.as_operand(),
7296            op3.as_operand(),
7297        );
7298    }
7299}
7300
7301/// `VRANGEPD_SAE`.
7302///
7303/// Supported operand variants:
7304///
7305/// ```text
7306/// +---+--------------------+
7307/// | # | Operands           |
7308/// +---+--------------------+
7309/// | 1 | Zmm, Zmm, Zmm, Imm |
7310/// +---+--------------------+
7311/// ```
7312pub trait VrangepdSaeEmitter<A, B, C, D> {
7313    fn vrangepd_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7314}
7315
7316impl<'a> VrangepdSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7317    fn vrangepd_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7318        self.emit(
7319            VRANGEPD512RRRI_SAE,
7320            op0.as_operand(),
7321            op1.as_operand(),
7322            op2.as_operand(),
7323            op3.as_operand(),
7324        );
7325    }
7326}
7327
7328/// `VRANGEPS`.
7329///
7330/// Supported operand variants:
7331///
7332/// ```text
7333/// +---+--------------------+
7334/// | # | Operands           |
7335/// +---+--------------------+
7336/// | 1 | Xmm, Xmm, Mem, Imm |
7337/// | 2 | Xmm, Xmm, Xmm, Imm |
7338/// | 3 | Ymm, Ymm, Mem, Imm |
7339/// | 4 | Ymm, Ymm, Ymm, Imm |
7340/// | 5 | Zmm, Zmm, Mem, Imm |
7341/// | 6 | Zmm, Zmm, Zmm, Imm |
7342/// +---+--------------------+
7343/// ```
7344pub trait VrangepsEmitter<A, B, C, D> {
7345    fn vrangeps(&mut self, op0: A, op1: B, op2: C, op3: D);
7346}
7347
7348impl<'a> VrangepsEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7349    fn vrangeps(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7350        self.emit(
7351            VRANGEPS128RRRI,
7352            op0.as_operand(),
7353            op1.as_operand(),
7354            op2.as_operand(),
7355            op3.as_operand(),
7356        );
7357    }
7358}
7359
7360impl<'a> VrangepsEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7361    fn vrangeps(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7362        self.emit(
7363            VRANGEPS128RRMI,
7364            op0.as_operand(),
7365            op1.as_operand(),
7366            op2.as_operand(),
7367            op3.as_operand(),
7368        );
7369    }
7370}
7371
7372impl<'a> VrangepsEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7373    fn vrangeps(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7374        self.emit(
7375            VRANGEPS256RRRI,
7376            op0.as_operand(),
7377            op1.as_operand(),
7378            op2.as_operand(),
7379            op3.as_operand(),
7380        );
7381    }
7382}
7383
7384impl<'a> VrangepsEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7385    fn vrangeps(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7386        self.emit(
7387            VRANGEPS256RRMI,
7388            op0.as_operand(),
7389            op1.as_operand(),
7390            op2.as_operand(),
7391            op3.as_operand(),
7392        );
7393    }
7394}
7395
7396impl<'a> VrangepsEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7397    fn vrangeps(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7398        self.emit(
7399            VRANGEPS512RRRI,
7400            op0.as_operand(),
7401            op1.as_operand(),
7402            op2.as_operand(),
7403            op3.as_operand(),
7404        );
7405    }
7406}
7407
7408impl<'a> VrangepsEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7409    fn vrangeps(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7410        self.emit(
7411            VRANGEPS512RRMI,
7412            op0.as_operand(),
7413            op1.as_operand(),
7414            op2.as_operand(),
7415            op3.as_operand(),
7416        );
7417    }
7418}
7419
7420/// `VRANGEPS_MASK`.
7421///
7422/// Supported operand variants:
7423///
7424/// ```text
7425/// +---+--------------------+
7426/// | # | Operands           |
7427/// +---+--------------------+
7428/// | 1 | Xmm, Xmm, Mem, Imm |
7429/// | 2 | Xmm, Xmm, Xmm, Imm |
7430/// | 3 | Ymm, Ymm, Mem, Imm |
7431/// | 4 | Ymm, Ymm, Ymm, Imm |
7432/// | 5 | Zmm, Zmm, Mem, Imm |
7433/// | 6 | Zmm, Zmm, Zmm, Imm |
7434/// +---+--------------------+
7435/// ```
7436pub trait VrangepsMaskEmitter<A, B, C, D> {
7437    fn vrangeps_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
7438}
7439
7440impl<'a> VrangepsMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7441    fn vrangeps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7442        self.emit(
7443            VRANGEPS128RRRI_MASK,
7444            op0.as_operand(),
7445            op1.as_operand(),
7446            op2.as_operand(),
7447            op3.as_operand(),
7448        );
7449    }
7450}
7451
7452impl<'a> VrangepsMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7453    fn vrangeps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7454        self.emit(
7455            VRANGEPS128RRMI_MASK,
7456            op0.as_operand(),
7457            op1.as_operand(),
7458            op2.as_operand(),
7459            op3.as_operand(),
7460        );
7461    }
7462}
7463
7464impl<'a> VrangepsMaskEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7465    fn vrangeps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7466        self.emit(
7467            VRANGEPS256RRRI_MASK,
7468            op0.as_operand(),
7469            op1.as_operand(),
7470            op2.as_operand(),
7471            op3.as_operand(),
7472        );
7473    }
7474}
7475
7476impl<'a> VrangepsMaskEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7477    fn vrangeps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7478        self.emit(
7479            VRANGEPS256RRMI_MASK,
7480            op0.as_operand(),
7481            op1.as_operand(),
7482            op2.as_operand(),
7483            op3.as_operand(),
7484        );
7485    }
7486}
7487
7488impl<'a> VrangepsMaskEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7489    fn vrangeps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7490        self.emit(
7491            VRANGEPS512RRRI_MASK,
7492            op0.as_operand(),
7493            op1.as_operand(),
7494            op2.as_operand(),
7495            op3.as_operand(),
7496        );
7497    }
7498}
7499
7500impl<'a> VrangepsMaskEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7501    fn vrangeps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7502        self.emit(
7503            VRANGEPS512RRMI_MASK,
7504            op0.as_operand(),
7505            op1.as_operand(),
7506            op2.as_operand(),
7507            op3.as_operand(),
7508        );
7509    }
7510}
7511
7512/// `VRANGEPS_MASK_SAE`.
7513///
7514/// Supported operand variants:
7515///
7516/// ```text
7517/// +---+--------------------+
7518/// | # | Operands           |
7519/// +---+--------------------+
7520/// | 1 | Zmm, Zmm, Zmm, Imm |
7521/// +---+--------------------+
7522/// ```
7523pub trait VrangepsMaskSaeEmitter<A, B, C, D> {
7524    fn vrangeps_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7525}
7526
7527impl<'a> VrangepsMaskSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7528    fn vrangeps_mask_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7529        self.emit(
7530            VRANGEPS512RRRI_MASK_SAE,
7531            op0.as_operand(),
7532            op1.as_operand(),
7533            op2.as_operand(),
7534            op3.as_operand(),
7535        );
7536    }
7537}
7538
7539/// `VRANGEPS_MASKZ`.
7540///
7541/// Supported operand variants:
7542///
7543/// ```text
7544/// +---+--------------------+
7545/// | # | Operands           |
7546/// +---+--------------------+
7547/// | 1 | Xmm, Xmm, Mem, Imm |
7548/// | 2 | Xmm, Xmm, Xmm, Imm |
7549/// | 3 | Ymm, Ymm, Mem, Imm |
7550/// | 4 | Ymm, Ymm, Ymm, Imm |
7551/// | 5 | Zmm, Zmm, Mem, Imm |
7552/// | 6 | Zmm, Zmm, Zmm, Imm |
7553/// +---+--------------------+
7554/// ```
7555pub trait VrangepsMaskzEmitter<A, B, C, D> {
7556    fn vrangeps_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
7557}
7558
7559impl<'a> VrangepsMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7560    fn vrangeps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7561        self.emit(
7562            VRANGEPS128RRRI_MASKZ,
7563            op0.as_operand(),
7564            op1.as_operand(),
7565            op2.as_operand(),
7566            op3.as_operand(),
7567        );
7568    }
7569}
7570
7571impl<'a> VrangepsMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7572    fn vrangeps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7573        self.emit(
7574            VRANGEPS128RRMI_MASKZ,
7575            op0.as_operand(),
7576            op1.as_operand(),
7577            op2.as_operand(),
7578            op3.as_operand(),
7579        );
7580    }
7581}
7582
7583impl<'a> VrangepsMaskzEmitter<Ymm, Ymm, Ymm, Imm> for Assembler<'a> {
7584    fn vrangeps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm, op3: Imm) {
7585        self.emit(
7586            VRANGEPS256RRRI_MASKZ,
7587            op0.as_operand(),
7588            op1.as_operand(),
7589            op2.as_operand(),
7590            op3.as_operand(),
7591        );
7592    }
7593}
7594
7595impl<'a> VrangepsMaskzEmitter<Ymm, Ymm, Mem, Imm> for Assembler<'a> {
7596    fn vrangeps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem, op3: Imm) {
7597        self.emit(
7598            VRANGEPS256RRMI_MASKZ,
7599            op0.as_operand(),
7600            op1.as_operand(),
7601            op2.as_operand(),
7602            op3.as_operand(),
7603        );
7604    }
7605}
7606
7607impl<'a> VrangepsMaskzEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7608    fn vrangeps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7609        self.emit(
7610            VRANGEPS512RRRI_MASKZ,
7611            op0.as_operand(),
7612            op1.as_operand(),
7613            op2.as_operand(),
7614            op3.as_operand(),
7615        );
7616    }
7617}
7618
7619impl<'a> VrangepsMaskzEmitter<Zmm, Zmm, Mem, Imm> for Assembler<'a> {
7620    fn vrangeps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem, op3: Imm) {
7621        self.emit(
7622            VRANGEPS512RRMI_MASKZ,
7623            op0.as_operand(),
7624            op1.as_operand(),
7625            op2.as_operand(),
7626            op3.as_operand(),
7627        );
7628    }
7629}
7630
7631/// `VRANGEPS_MASKZ_SAE`.
7632///
7633/// Supported operand variants:
7634///
7635/// ```text
7636/// +---+--------------------+
7637/// | # | Operands           |
7638/// +---+--------------------+
7639/// | 1 | Zmm, Zmm, Zmm, Imm |
7640/// +---+--------------------+
7641/// ```
7642pub trait VrangepsMaskzSaeEmitter<A, B, C, D> {
7643    fn vrangeps_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7644}
7645
7646impl<'a> VrangepsMaskzSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7647    fn vrangeps_maskz_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7648        self.emit(
7649            VRANGEPS512RRRI_MASKZ_SAE,
7650            op0.as_operand(),
7651            op1.as_operand(),
7652            op2.as_operand(),
7653            op3.as_operand(),
7654        );
7655    }
7656}
7657
7658/// `VRANGEPS_SAE`.
7659///
7660/// Supported operand variants:
7661///
7662/// ```text
7663/// +---+--------------------+
7664/// | # | Operands           |
7665/// +---+--------------------+
7666/// | 1 | Zmm, Zmm, Zmm, Imm |
7667/// +---+--------------------+
7668/// ```
7669pub trait VrangepsSaeEmitter<A, B, C, D> {
7670    fn vrangeps_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7671}
7672
7673impl<'a> VrangepsSaeEmitter<Zmm, Zmm, Zmm, Imm> for Assembler<'a> {
7674    fn vrangeps_sae(&mut self, op0: Zmm, op1: Zmm, op2: Zmm, op3: Imm) {
7675        self.emit(
7676            VRANGEPS512RRRI_SAE,
7677            op0.as_operand(),
7678            op1.as_operand(),
7679            op2.as_operand(),
7680            op3.as_operand(),
7681        );
7682    }
7683}
7684
7685/// `VRANGESD`.
7686///
7687/// Supported operand variants:
7688///
7689/// ```text
7690/// +---+--------------------+
7691/// | # | Operands           |
7692/// +---+--------------------+
7693/// | 1 | Xmm, Xmm, Mem, Imm |
7694/// | 2 | Xmm, Xmm, Xmm, Imm |
7695/// +---+--------------------+
7696/// ```
7697pub trait VrangesdEmitter<A, B, C, D> {
7698    fn vrangesd(&mut self, op0: A, op1: B, op2: C, op3: D);
7699}
7700
7701impl<'a> VrangesdEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7702    fn vrangesd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7703        self.emit(
7704            VRANGESDRRRI,
7705            op0.as_operand(),
7706            op1.as_operand(),
7707            op2.as_operand(),
7708            op3.as_operand(),
7709        );
7710    }
7711}
7712
7713impl<'a> VrangesdEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7714    fn vrangesd(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7715        self.emit(
7716            VRANGESDRRMI,
7717            op0.as_operand(),
7718            op1.as_operand(),
7719            op2.as_operand(),
7720            op3.as_operand(),
7721        );
7722    }
7723}
7724
7725/// `VRANGESD_MASK`.
7726///
7727/// Supported operand variants:
7728///
7729/// ```text
7730/// +---+--------------------+
7731/// | # | Operands           |
7732/// +---+--------------------+
7733/// | 1 | Xmm, Xmm, Mem, Imm |
7734/// | 2 | Xmm, Xmm, Xmm, Imm |
7735/// +---+--------------------+
7736/// ```
7737pub trait VrangesdMaskEmitter<A, B, C, D> {
7738    fn vrangesd_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
7739}
7740
7741impl<'a> VrangesdMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7742    fn vrangesd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7743        self.emit(
7744            VRANGESDRRRI_MASK,
7745            op0.as_operand(),
7746            op1.as_operand(),
7747            op2.as_operand(),
7748            op3.as_operand(),
7749        );
7750    }
7751}
7752
7753impl<'a> VrangesdMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7754    fn vrangesd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7755        self.emit(
7756            VRANGESDRRMI_MASK,
7757            op0.as_operand(),
7758            op1.as_operand(),
7759            op2.as_operand(),
7760            op3.as_operand(),
7761        );
7762    }
7763}
7764
7765/// `VRANGESD_MASK_SAE`.
7766///
7767/// Supported operand variants:
7768///
7769/// ```text
7770/// +---+--------------------+
7771/// | # | Operands           |
7772/// +---+--------------------+
7773/// | 1 | Xmm, Xmm, Xmm, Imm |
7774/// +---+--------------------+
7775/// ```
7776pub trait VrangesdMaskSaeEmitter<A, B, C, D> {
7777    fn vrangesd_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7778}
7779
7780impl<'a> VrangesdMaskSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7781    fn vrangesd_mask_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7782        self.emit(
7783            VRANGESDRRRI_MASK_SAE,
7784            op0.as_operand(),
7785            op1.as_operand(),
7786            op2.as_operand(),
7787            op3.as_operand(),
7788        );
7789    }
7790}
7791
7792/// `VRANGESD_MASKZ`.
7793///
7794/// Supported operand variants:
7795///
7796/// ```text
7797/// +---+--------------------+
7798/// | # | Operands           |
7799/// +---+--------------------+
7800/// | 1 | Xmm, Xmm, Mem, Imm |
7801/// | 2 | Xmm, Xmm, Xmm, Imm |
7802/// +---+--------------------+
7803/// ```
7804pub trait VrangesdMaskzEmitter<A, B, C, D> {
7805    fn vrangesd_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
7806}
7807
7808impl<'a> VrangesdMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7809    fn vrangesd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7810        self.emit(
7811            VRANGESDRRRI_MASKZ,
7812            op0.as_operand(),
7813            op1.as_operand(),
7814            op2.as_operand(),
7815            op3.as_operand(),
7816        );
7817    }
7818}
7819
7820impl<'a> VrangesdMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7821    fn vrangesd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7822        self.emit(
7823            VRANGESDRRMI_MASKZ,
7824            op0.as_operand(),
7825            op1.as_operand(),
7826            op2.as_operand(),
7827            op3.as_operand(),
7828        );
7829    }
7830}
7831
7832/// `VRANGESD_MASKZ_SAE`.
7833///
7834/// Supported operand variants:
7835///
7836/// ```text
7837/// +---+--------------------+
7838/// | # | Operands           |
7839/// +---+--------------------+
7840/// | 1 | Xmm, Xmm, Xmm, Imm |
7841/// +---+--------------------+
7842/// ```
7843pub trait VrangesdMaskzSaeEmitter<A, B, C, D> {
7844    fn vrangesd_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7845}
7846
7847impl<'a> VrangesdMaskzSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7848    fn vrangesd_maskz_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7849        self.emit(
7850            VRANGESDRRRI_MASKZ_SAE,
7851            op0.as_operand(),
7852            op1.as_operand(),
7853            op2.as_operand(),
7854            op3.as_operand(),
7855        );
7856    }
7857}
7858
7859/// `VRANGESD_SAE`.
7860///
7861/// Supported operand variants:
7862///
7863/// ```text
7864/// +---+--------------------+
7865/// | # | Operands           |
7866/// +---+--------------------+
7867/// | 1 | Xmm, Xmm, Xmm, Imm |
7868/// +---+--------------------+
7869/// ```
7870pub trait VrangesdSaeEmitter<A, B, C, D> {
7871    fn vrangesd_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7872}
7873
7874impl<'a> VrangesdSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7875    fn vrangesd_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7876        self.emit(
7877            VRANGESDRRRI_SAE,
7878            op0.as_operand(),
7879            op1.as_operand(),
7880            op2.as_operand(),
7881            op3.as_operand(),
7882        );
7883    }
7884}
7885
7886/// `VRANGESS`.
7887///
7888/// Supported operand variants:
7889///
7890/// ```text
7891/// +---+--------------------+
7892/// | # | Operands           |
7893/// +---+--------------------+
7894/// | 1 | Xmm, Xmm, Mem, Imm |
7895/// | 2 | Xmm, Xmm, Xmm, Imm |
7896/// +---+--------------------+
7897/// ```
7898pub trait VrangessEmitter<A, B, C, D> {
7899    fn vrangess(&mut self, op0: A, op1: B, op2: C, op3: D);
7900}
7901
7902impl<'a> VrangessEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7903    fn vrangess(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7904        self.emit(
7905            VRANGESSRRRI,
7906            op0.as_operand(),
7907            op1.as_operand(),
7908            op2.as_operand(),
7909            op3.as_operand(),
7910        );
7911    }
7912}
7913
7914impl<'a> VrangessEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7915    fn vrangess(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7916        self.emit(
7917            VRANGESSRRMI,
7918            op0.as_operand(),
7919            op1.as_operand(),
7920            op2.as_operand(),
7921            op3.as_operand(),
7922        );
7923    }
7924}
7925
7926/// `VRANGESS_MASK`.
7927///
7928/// Supported operand variants:
7929///
7930/// ```text
7931/// +---+--------------------+
7932/// | # | Operands           |
7933/// +---+--------------------+
7934/// | 1 | Xmm, Xmm, Mem, Imm |
7935/// | 2 | Xmm, Xmm, Xmm, Imm |
7936/// +---+--------------------+
7937/// ```
7938pub trait VrangessMaskEmitter<A, B, C, D> {
7939    fn vrangess_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
7940}
7941
7942impl<'a> VrangessMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7943    fn vrangess_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7944        self.emit(
7945            VRANGESSRRRI_MASK,
7946            op0.as_operand(),
7947            op1.as_operand(),
7948            op2.as_operand(),
7949            op3.as_operand(),
7950        );
7951    }
7952}
7953
7954impl<'a> VrangessMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
7955    fn vrangess_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
7956        self.emit(
7957            VRANGESSRRMI_MASK,
7958            op0.as_operand(),
7959            op1.as_operand(),
7960            op2.as_operand(),
7961            op3.as_operand(),
7962        );
7963    }
7964}
7965
7966/// `VRANGESS_MASK_SAE`.
7967///
7968/// Supported operand variants:
7969///
7970/// ```text
7971/// +---+--------------------+
7972/// | # | Operands           |
7973/// +---+--------------------+
7974/// | 1 | Xmm, Xmm, Xmm, Imm |
7975/// +---+--------------------+
7976/// ```
7977pub trait VrangessMaskSaeEmitter<A, B, C, D> {
7978    fn vrangess_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
7979}
7980
7981impl<'a> VrangessMaskSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
7982    fn vrangess_mask_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
7983        self.emit(
7984            VRANGESSRRRI_MASK_SAE,
7985            op0.as_operand(),
7986            op1.as_operand(),
7987            op2.as_operand(),
7988            op3.as_operand(),
7989        );
7990    }
7991}
7992
7993/// `VRANGESS_MASKZ`.
7994///
7995/// Supported operand variants:
7996///
7997/// ```text
7998/// +---+--------------------+
7999/// | # | Operands           |
8000/// +---+--------------------+
8001/// | 1 | Xmm, Xmm, Mem, Imm |
8002/// | 2 | Xmm, Xmm, Xmm, Imm |
8003/// +---+--------------------+
8004/// ```
8005pub trait VrangessMaskzEmitter<A, B, C, D> {
8006    fn vrangess_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
8007}
8008
8009impl<'a> VrangessMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8010    fn vrangess_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8011        self.emit(
8012            VRANGESSRRRI_MASKZ,
8013            op0.as_operand(),
8014            op1.as_operand(),
8015            op2.as_operand(),
8016            op3.as_operand(),
8017        );
8018    }
8019}
8020
8021impl<'a> VrangessMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
8022    fn vrangess_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
8023        self.emit(
8024            VRANGESSRRMI_MASKZ,
8025            op0.as_operand(),
8026            op1.as_operand(),
8027            op2.as_operand(),
8028            op3.as_operand(),
8029        );
8030    }
8031}
8032
8033/// `VRANGESS_MASKZ_SAE`.
8034///
8035/// Supported operand variants:
8036///
8037/// ```text
8038/// +---+--------------------+
8039/// | # | Operands           |
8040/// +---+--------------------+
8041/// | 1 | Xmm, Xmm, Xmm, Imm |
8042/// +---+--------------------+
8043/// ```
8044pub trait VrangessMaskzSaeEmitter<A, B, C, D> {
8045    fn vrangess_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
8046}
8047
8048impl<'a> VrangessMaskzSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8049    fn vrangess_maskz_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8050        self.emit(
8051            VRANGESSRRRI_MASKZ_SAE,
8052            op0.as_operand(),
8053            op1.as_operand(),
8054            op2.as_operand(),
8055            op3.as_operand(),
8056        );
8057    }
8058}
8059
8060/// `VRANGESS_SAE`.
8061///
8062/// Supported operand variants:
8063///
8064/// ```text
8065/// +---+--------------------+
8066/// | # | Operands           |
8067/// +---+--------------------+
8068/// | 1 | Xmm, Xmm, Xmm, Imm |
8069/// +---+--------------------+
8070/// ```
8071pub trait VrangessSaeEmitter<A, B, C, D> {
8072    fn vrangess_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
8073}
8074
8075impl<'a> VrangessSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8076    fn vrangess_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8077        self.emit(
8078            VRANGESSRRRI_SAE,
8079            op0.as_operand(),
8080            op1.as_operand(),
8081            op2.as_operand(),
8082            op3.as_operand(),
8083        );
8084    }
8085}
8086
8087/// `VREDUCEPD`.
8088///
8089/// Supported operand variants:
8090///
8091/// ```text
8092/// +---+---------------+
8093/// | # | Operands      |
8094/// +---+---------------+
8095/// | 1 | Xmm, Mem, Imm |
8096/// | 2 | Xmm, Xmm, Imm |
8097/// | 3 | Ymm, Mem, Imm |
8098/// | 4 | Ymm, Ymm, Imm |
8099/// | 5 | Zmm, Mem, Imm |
8100/// | 6 | Zmm, Zmm, Imm |
8101/// +---+---------------+
8102/// ```
8103pub trait VreducepdEmitter<A, B, C> {
8104    fn vreducepd(&mut self, op0: A, op1: B, op2: C);
8105}
8106
8107impl<'a> VreducepdEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8108    fn vreducepd(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8109        self.emit(
8110            VREDUCEPD128RRI,
8111            op0.as_operand(),
8112            op1.as_operand(),
8113            op2.as_operand(),
8114            &NOREG,
8115        );
8116    }
8117}
8118
8119impl<'a> VreducepdEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8120    fn vreducepd(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8121        self.emit(
8122            VREDUCEPD128RMI,
8123            op0.as_operand(),
8124            op1.as_operand(),
8125            op2.as_operand(),
8126            &NOREG,
8127        );
8128    }
8129}
8130
8131impl<'a> VreducepdEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8132    fn vreducepd(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8133        self.emit(
8134            VREDUCEPD256RRI,
8135            op0.as_operand(),
8136            op1.as_operand(),
8137            op2.as_operand(),
8138            &NOREG,
8139        );
8140    }
8141}
8142
8143impl<'a> VreducepdEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8144    fn vreducepd(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8145        self.emit(
8146            VREDUCEPD256RMI,
8147            op0.as_operand(),
8148            op1.as_operand(),
8149            op2.as_operand(),
8150            &NOREG,
8151        );
8152    }
8153}
8154
8155impl<'a> VreducepdEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8156    fn vreducepd(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8157        self.emit(
8158            VREDUCEPD512RRI,
8159            op0.as_operand(),
8160            op1.as_operand(),
8161            op2.as_operand(),
8162            &NOREG,
8163        );
8164    }
8165}
8166
8167impl<'a> VreducepdEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8168    fn vreducepd(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8169        self.emit(
8170            VREDUCEPD512RMI,
8171            op0.as_operand(),
8172            op1.as_operand(),
8173            op2.as_operand(),
8174            &NOREG,
8175        );
8176    }
8177}
8178
8179/// `VREDUCEPD_MASK`.
8180///
8181/// Supported operand variants:
8182///
8183/// ```text
8184/// +---+---------------+
8185/// | # | Operands      |
8186/// +---+---------------+
8187/// | 1 | Xmm, Mem, Imm |
8188/// | 2 | Xmm, Xmm, Imm |
8189/// | 3 | Ymm, Mem, Imm |
8190/// | 4 | Ymm, Ymm, Imm |
8191/// | 5 | Zmm, Mem, Imm |
8192/// | 6 | Zmm, Zmm, Imm |
8193/// +---+---------------+
8194/// ```
8195pub trait VreducepdMaskEmitter<A, B, C> {
8196    fn vreducepd_mask(&mut self, op0: A, op1: B, op2: C);
8197}
8198
8199impl<'a> VreducepdMaskEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8200    fn vreducepd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8201        self.emit(
8202            VREDUCEPD128RRI_MASK,
8203            op0.as_operand(),
8204            op1.as_operand(),
8205            op2.as_operand(),
8206            &NOREG,
8207        );
8208    }
8209}
8210
8211impl<'a> VreducepdMaskEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8212    fn vreducepd_mask(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8213        self.emit(
8214            VREDUCEPD128RMI_MASK,
8215            op0.as_operand(),
8216            op1.as_operand(),
8217            op2.as_operand(),
8218            &NOREG,
8219        );
8220    }
8221}
8222
8223impl<'a> VreducepdMaskEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8224    fn vreducepd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8225        self.emit(
8226            VREDUCEPD256RRI_MASK,
8227            op0.as_operand(),
8228            op1.as_operand(),
8229            op2.as_operand(),
8230            &NOREG,
8231        );
8232    }
8233}
8234
8235impl<'a> VreducepdMaskEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8236    fn vreducepd_mask(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8237        self.emit(
8238            VREDUCEPD256RMI_MASK,
8239            op0.as_operand(),
8240            op1.as_operand(),
8241            op2.as_operand(),
8242            &NOREG,
8243        );
8244    }
8245}
8246
8247impl<'a> VreducepdMaskEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8248    fn vreducepd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8249        self.emit(
8250            VREDUCEPD512RRI_MASK,
8251            op0.as_operand(),
8252            op1.as_operand(),
8253            op2.as_operand(),
8254            &NOREG,
8255        );
8256    }
8257}
8258
8259impl<'a> VreducepdMaskEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8260    fn vreducepd_mask(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8261        self.emit(
8262            VREDUCEPD512RMI_MASK,
8263            op0.as_operand(),
8264            op1.as_operand(),
8265            op2.as_operand(),
8266            &NOREG,
8267        );
8268    }
8269}
8270
8271/// `VREDUCEPD_MASK_SAE`.
8272///
8273/// Supported operand variants:
8274///
8275/// ```text
8276/// +---+---------------+
8277/// | # | Operands      |
8278/// +---+---------------+
8279/// | 1 | Zmm, Zmm, Imm |
8280/// +---+---------------+
8281/// ```
8282pub trait VreducepdMaskSaeEmitter<A, B, C> {
8283    fn vreducepd_mask_sae(&mut self, op0: A, op1: B, op2: C);
8284}
8285
8286impl<'a> VreducepdMaskSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8287    fn vreducepd_mask_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8288        self.emit(
8289            VREDUCEPD512RRI_MASK_SAE,
8290            op0.as_operand(),
8291            op1.as_operand(),
8292            op2.as_operand(),
8293            &NOREG,
8294        );
8295    }
8296}
8297
8298/// `VREDUCEPD_MASKZ`.
8299///
8300/// Supported operand variants:
8301///
8302/// ```text
8303/// +---+---------------+
8304/// | # | Operands      |
8305/// +---+---------------+
8306/// | 1 | Xmm, Mem, Imm |
8307/// | 2 | Xmm, Xmm, Imm |
8308/// | 3 | Ymm, Mem, Imm |
8309/// | 4 | Ymm, Ymm, Imm |
8310/// | 5 | Zmm, Mem, Imm |
8311/// | 6 | Zmm, Zmm, Imm |
8312/// +---+---------------+
8313/// ```
8314pub trait VreducepdMaskzEmitter<A, B, C> {
8315    fn vreducepd_maskz(&mut self, op0: A, op1: B, op2: C);
8316}
8317
8318impl<'a> VreducepdMaskzEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8319    fn vreducepd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8320        self.emit(
8321            VREDUCEPD128RRI_MASKZ,
8322            op0.as_operand(),
8323            op1.as_operand(),
8324            op2.as_operand(),
8325            &NOREG,
8326        );
8327    }
8328}
8329
8330impl<'a> VreducepdMaskzEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8331    fn vreducepd_maskz(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8332        self.emit(
8333            VREDUCEPD128RMI_MASKZ,
8334            op0.as_operand(),
8335            op1.as_operand(),
8336            op2.as_operand(),
8337            &NOREG,
8338        );
8339    }
8340}
8341
8342impl<'a> VreducepdMaskzEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8343    fn vreducepd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8344        self.emit(
8345            VREDUCEPD256RRI_MASKZ,
8346            op0.as_operand(),
8347            op1.as_operand(),
8348            op2.as_operand(),
8349            &NOREG,
8350        );
8351    }
8352}
8353
8354impl<'a> VreducepdMaskzEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8355    fn vreducepd_maskz(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8356        self.emit(
8357            VREDUCEPD256RMI_MASKZ,
8358            op0.as_operand(),
8359            op1.as_operand(),
8360            op2.as_operand(),
8361            &NOREG,
8362        );
8363    }
8364}
8365
8366impl<'a> VreducepdMaskzEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8367    fn vreducepd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8368        self.emit(
8369            VREDUCEPD512RRI_MASKZ,
8370            op0.as_operand(),
8371            op1.as_operand(),
8372            op2.as_operand(),
8373            &NOREG,
8374        );
8375    }
8376}
8377
8378impl<'a> VreducepdMaskzEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8379    fn vreducepd_maskz(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8380        self.emit(
8381            VREDUCEPD512RMI_MASKZ,
8382            op0.as_operand(),
8383            op1.as_operand(),
8384            op2.as_operand(),
8385            &NOREG,
8386        );
8387    }
8388}
8389
8390/// `VREDUCEPD_MASKZ_SAE`.
8391///
8392/// Supported operand variants:
8393///
8394/// ```text
8395/// +---+---------------+
8396/// | # | Operands      |
8397/// +---+---------------+
8398/// | 1 | Zmm, Zmm, Imm |
8399/// +---+---------------+
8400/// ```
8401pub trait VreducepdMaskzSaeEmitter<A, B, C> {
8402    fn vreducepd_maskz_sae(&mut self, op0: A, op1: B, op2: C);
8403}
8404
8405impl<'a> VreducepdMaskzSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8406    fn vreducepd_maskz_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8407        self.emit(
8408            VREDUCEPD512RRI_MASKZ_SAE,
8409            op0.as_operand(),
8410            op1.as_operand(),
8411            op2.as_operand(),
8412            &NOREG,
8413        );
8414    }
8415}
8416
8417/// `VREDUCEPD_SAE`.
8418///
8419/// Supported operand variants:
8420///
8421/// ```text
8422/// +---+---------------+
8423/// | # | Operands      |
8424/// +---+---------------+
8425/// | 1 | Zmm, Zmm, Imm |
8426/// +---+---------------+
8427/// ```
8428pub trait VreducepdSaeEmitter<A, B, C> {
8429    fn vreducepd_sae(&mut self, op0: A, op1: B, op2: C);
8430}
8431
8432impl<'a> VreducepdSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8433    fn vreducepd_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8434        self.emit(
8435            VREDUCEPD512RRI_SAE,
8436            op0.as_operand(),
8437            op1.as_operand(),
8438            op2.as_operand(),
8439            &NOREG,
8440        );
8441    }
8442}
8443
8444/// `VREDUCEPS`.
8445///
8446/// Supported operand variants:
8447///
8448/// ```text
8449/// +---+---------------+
8450/// | # | Operands      |
8451/// +---+---------------+
8452/// | 1 | Xmm, Mem, Imm |
8453/// | 2 | Xmm, Xmm, Imm |
8454/// | 3 | Ymm, Mem, Imm |
8455/// | 4 | Ymm, Ymm, Imm |
8456/// | 5 | Zmm, Mem, Imm |
8457/// | 6 | Zmm, Zmm, Imm |
8458/// +---+---------------+
8459/// ```
8460pub trait VreducepsEmitter<A, B, C> {
8461    fn vreduceps(&mut self, op0: A, op1: B, op2: C);
8462}
8463
8464impl<'a> VreducepsEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8465    fn vreduceps(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8466        self.emit(
8467            VREDUCEPS128RRI,
8468            op0.as_operand(),
8469            op1.as_operand(),
8470            op2.as_operand(),
8471            &NOREG,
8472        );
8473    }
8474}
8475
8476impl<'a> VreducepsEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8477    fn vreduceps(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8478        self.emit(
8479            VREDUCEPS128RMI,
8480            op0.as_operand(),
8481            op1.as_operand(),
8482            op2.as_operand(),
8483            &NOREG,
8484        );
8485    }
8486}
8487
8488impl<'a> VreducepsEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8489    fn vreduceps(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8490        self.emit(
8491            VREDUCEPS256RRI,
8492            op0.as_operand(),
8493            op1.as_operand(),
8494            op2.as_operand(),
8495            &NOREG,
8496        );
8497    }
8498}
8499
8500impl<'a> VreducepsEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8501    fn vreduceps(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8502        self.emit(
8503            VREDUCEPS256RMI,
8504            op0.as_operand(),
8505            op1.as_operand(),
8506            op2.as_operand(),
8507            &NOREG,
8508        );
8509    }
8510}
8511
8512impl<'a> VreducepsEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8513    fn vreduceps(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8514        self.emit(
8515            VREDUCEPS512RRI,
8516            op0.as_operand(),
8517            op1.as_operand(),
8518            op2.as_operand(),
8519            &NOREG,
8520        );
8521    }
8522}
8523
8524impl<'a> VreducepsEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8525    fn vreduceps(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8526        self.emit(
8527            VREDUCEPS512RMI,
8528            op0.as_operand(),
8529            op1.as_operand(),
8530            op2.as_operand(),
8531            &NOREG,
8532        );
8533    }
8534}
8535
8536/// `VREDUCEPS_MASK`.
8537///
8538/// Supported operand variants:
8539///
8540/// ```text
8541/// +---+---------------+
8542/// | # | Operands      |
8543/// +---+---------------+
8544/// | 1 | Xmm, Mem, Imm |
8545/// | 2 | Xmm, Xmm, Imm |
8546/// | 3 | Ymm, Mem, Imm |
8547/// | 4 | Ymm, Ymm, Imm |
8548/// | 5 | Zmm, Mem, Imm |
8549/// | 6 | Zmm, Zmm, Imm |
8550/// +---+---------------+
8551/// ```
8552pub trait VreducepsMaskEmitter<A, B, C> {
8553    fn vreduceps_mask(&mut self, op0: A, op1: B, op2: C);
8554}
8555
8556impl<'a> VreducepsMaskEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8557    fn vreduceps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8558        self.emit(
8559            VREDUCEPS128RRI_MASK,
8560            op0.as_operand(),
8561            op1.as_operand(),
8562            op2.as_operand(),
8563            &NOREG,
8564        );
8565    }
8566}
8567
8568impl<'a> VreducepsMaskEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8569    fn vreduceps_mask(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8570        self.emit(
8571            VREDUCEPS128RMI_MASK,
8572            op0.as_operand(),
8573            op1.as_operand(),
8574            op2.as_operand(),
8575            &NOREG,
8576        );
8577    }
8578}
8579
8580impl<'a> VreducepsMaskEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8581    fn vreduceps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8582        self.emit(
8583            VREDUCEPS256RRI_MASK,
8584            op0.as_operand(),
8585            op1.as_operand(),
8586            op2.as_operand(),
8587            &NOREG,
8588        );
8589    }
8590}
8591
8592impl<'a> VreducepsMaskEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8593    fn vreduceps_mask(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8594        self.emit(
8595            VREDUCEPS256RMI_MASK,
8596            op0.as_operand(),
8597            op1.as_operand(),
8598            op2.as_operand(),
8599            &NOREG,
8600        );
8601    }
8602}
8603
8604impl<'a> VreducepsMaskEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8605    fn vreduceps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8606        self.emit(
8607            VREDUCEPS512RRI_MASK,
8608            op0.as_operand(),
8609            op1.as_operand(),
8610            op2.as_operand(),
8611            &NOREG,
8612        );
8613    }
8614}
8615
8616impl<'a> VreducepsMaskEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8617    fn vreduceps_mask(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8618        self.emit(
8619            VREDUCEPS512RMI_MASK,
8620            op0.as_operand(),
8621            op1.as_operand(),
8622            op2.as_operand(),
8623            &NOREG,
8624        );
8625    }
8626}
8627
8628/// `VREDUCEPS_MASK_SAE`.
8629///
8630/// Supported operand variants:
8631///
8632/// ```text
8633/// +---+---------------+
8634/// | # | Operands      |
8635/// +---+---------------+
8636/// | 1 | Zmm, Zmm, Imm |
8637/// +---+---------------+
8638/// ```
8639pub trait VreducepsMaskSaeEmitter<A, B, C> {
8640    fn vreduceps_mask_sae(&mut self, op0: A, op1: B, op2: C);
8641}
8642
8643impl<'a> VreducepsMaskSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8644    fn vreduceps_mask_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8645        self.emit(
8646            VREDUCEPS512RRI_MASK_SAE,
8647            op0.as_operand(),
8648            op1.as_operand(),
8649            op2.as_operand(),
8650            &NOREG,
8651        );
8652    }
8653}
8654
8655/// `VREDUCEPS_MASKZ`.
8656///
8657/// Supported operand variants:
8658///
8659/// ```text
8660/// +---+---------------+
8661/// | # | Operands      |
8662/// +---+---------------+
8663/// | 1 | Xmm, Mem, Imm |
8664/// | 2 | Xmm, Xmm, Imm |
8665/// | 3 | Ymm, Mem, Imm |
8666/// | 4 | Ymm, Ymm, Imm |
8667/// | 5 | Zmm, Mem, Imm |
8668/// | 6 | Zmm, Zmm, Imm |
8669/// +---+---------------+
8670/// ```
8671pub trait VreducepsMaskzEmitter<A, B, C> {
8672    fn vreduceps_maskz(&mut self, op0: A, op1: B, op2: C);
8673}
8674
8675impl<'a> VreducepsMaskzEmitter<Xmm, Xmm, Imm> for Assembler<'a> {
8676    fn vreduceps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Imm) {
8677        self.emit(
8678            VREDUCEPS128RRI_MASKZ,
8679            op0.as_operand(),
8680            op1.as_operand(),
8681            op2.as_operand(),
8682            &NOREG,
8683        );
8684    }
8685}
8686
8687impl<'a> VreducepsMaskzEmitter<Xmm, Mem, Imm> for Assembler<'a> {
8688    fn vreduceps_maskz(&mut self, op0: Xmm, op1: Mem, op2: Imm) {
8689        self.emit(
8690            VREDUCEPS128RMI_MASKZ,
8691            op0.as_operand(),
8692            op1.as_operand(),
8693            op2.as_operand(),
8694            &NOREG,
8695        );
8696    }
8697}
8698
8699impl<'a> VreducepsMaskzEmitter<Ymm, Ymm, Imm> for Assembler<'a> {
8700    fn vreduceps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Imm) {
8701        self.emit(
8702            VREDUCEPS256RRI_MASKZ,
8703            op0.as_operand(),
8704            op1.as_operand(),
8705            op2.as_operand(),
8706            &NOREG,
8707        );
8708    }
8709}
8710
8711impl<'a> VreducepsMaskzEmitter<Ymm, Mem, Imm> for Assembler<'a> {
8712    fn vreduceps_maskz(&mut self, op0: Ymm, op1: Mem, op2: Imm) {
8713        self.emit(
8714            VREDUCEPS256RMI_MASKZ,
8715            op0.as_operand(),
8716            op1.as_operand(),
8717            op2.as_operand(),
8718            &NOREG,
8719        );
8720    }
8721}
8722
8723impl<'a> VreducepsMaskzEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8724    fn vreduceps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8725        self.emit(
8726            VREDUCEPS512RRI_MASKZ,
8727            op0.as_operand(),
8728            op1.as_operand(),
8729            op2.as_operand(),
8730            &NOREG,
8731        );
8732    }
8733}
8734
8735impl<'a> VreducepsMaskzEmitter<Zmm, Mem, Imm> for Assembler<'a> {
8736    fn vreduceps_maskz(&mut self, op0: Zmm, op1: Mem, op2: Imm) {
8737        self.emit(
8738            VREDUCEPS512RMI_MASKZ,
8739            op0.as_operand(),
8740            op1.as_operand(),
8741            op2.as_operand(),
8742            &NOREG,
8743        );
8744    }
8745}
8746
8747/// `VREDUCEPS_MASKZ_SAE`.
8748///
8749/// Supported operand variants:
8750///
8751/// ```text
8752/// +---+---------------+
8753/// | # | Operands      |
8754/// +---+---------------+
8755/// | 1 | Zmm, Zmm, Imm |
8756/// +---+---------------+
8757/// ```
8758pub trait VreducepsMaskzSaeEmitter<A, B, C> {
8759    fn vreduceps_maskz_sae(&mut self, op0: A, op1: B, op2: C);
8760}
8761
8762impl<'a> VreducepsMaskzSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8763    fn vreduceps_maskz_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8764        self.emit(
8765            VREDUCEPS512RRI_MASKZ_SAE,
8766            op0.as_operand(),
8767            op1.as_operand(),
8768            op2.as_operand(),
8769            &NOREG,
8770        );
8771    }
8772}
8773
8774/// `VREDUCEPS_SAE`.
8775///
8776/// Supported operand variants:
8777///
8778/// ```text
8779/// +---+---------------+
8780/// | # | Operands      |
8781/// +---+---------------+
8782/// | 1 | Zmm, Zmm, Imm |
8783/// +---+---------------+
8784/// ```
8785pub trait VreducepsSaeEmitter<A, B, C> {
8786    fn vreduceps_sae(&mut self, op0: A, op1: B, op2: C);
8787}
8788
8789impl<'a> VreducepsSaeEmitter<Zmm, Zmm, Imm> for Assembler<'a> {
8790    fn vreduceps_sae(&mut self, op0: Zmm, op1: Zmm, op2: Imm) {
8791        self.emit(
8792            VREDUCEPS512RRI_SAE,
8793            op0.as_operand(),
8794            op1.as_operand(),
8795            op2.as_operand(),
8796            &NOREG,
8797        );
8798    }
8799}
8800
8801/// `VREDUCESD`.
8802///
8803/// Supported operand variants:
8804///
8805/// ```text
8806/// +---+--------------------+
8807/// | # | Operands           |
8808/// +---+--------------------+
8809/// | 1 | Xmm, Xmm, Mem, Imm |
8810/// | 2 | Xmm, Xmm, Xmm, Imm |
8811/// +---+--------------------+
8812/// ```
8813pub trait VreducesdEmitter<A, B, C, D> {
8814    fn vreducesd(&mut self, op0: A, op1: B, op2: C, op3: D);
8815}
8816
8817impl<'a> VreducesdEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8818    fn vreducesd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8819        self.emit(
8820            VREDUCESDRRRI,
8821            op0.as_operand(),
8822            op1.as_operand(),
8823            op2.as_operand(),
8824            op3.as_operand(),
8825        );
8826    }
8827}
8828
8829impl<'a> VreducesdEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
8830    fn vreducesd(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
8831        self.emit(
8832            VREDUCESDRRMI,
8833            op0.as_operand(),
8834            op1.as_operand(),
8835            op2.as_operand(),
8836            op3.as_operand(),
8837        );
8838    }
8839}
8840
8841/// `VREDUCESD_MASK`.
8842///
8843/// Supported operand variants:
8844///
8845/// ```text
8846/// +---+--------------------+
8847/// | # | Operands           |
8848/// +---+--------------------+
8849/// | 1 | Xmm, Xmm, Mem, Imm |
8850/// | 2 | Xmm, Xmm, Xmm, Imm |
8851/// +---+--------------------+
8852/// ```
8853pub trait VreducesdMaskEmitter<A, B, C, D> {
8854    fn vreducesd_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
8855}
8856
8857impl<'a> VreducesdMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8858    fn vreducesd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8859        self.emit(
8860            VREDUCESDRRRI_MASK,
8861            op0.as_operand(),
8862            op1.as_operand(),
8863            op2.as_operand(),
8864            op3.as_operand(),
8865        );
8866    }
8867}
8868
8869impl<'a> VreducesdMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
8870    fn vreducesd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
8871        self.emit(
8872            VREDUCESDRRMI_MASK,
8873            op0.as_operand(),
8874            op1.as_operand(),
8875            op2.as_operand(),
8876            op3.as_operand(),
8877        );
8878    }
8879}
8880
8881/// `VREDUCESD_MASK_SAE`.
8882///
8883/// Supported operand variants:
8884///
8885/// ```text
8886/// +---+--------------------+
8887/// | # | Operands           |
8888/// +---+--------------------+
8889/// | 1 | Xmm, Xmm, Xmm, Imm |
8890/// +---+--------------------+
8891/// ```
8892pub trait VreducesdMaskSaeEmitter<A, B, C, D> {
8893    fn vreducesd_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
8894}
8895
8896impl<'a> VreducesdMaskSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8897    fn vreducesd_mask_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8898        self.emit(
8899            VREDUCESDRRRI_MASK_SAE,
8900            op0.as_operand(),
8901            op1.as_operand(),
8902            op2.as_operand(),
8903            op3.as_operand(),
8904        );
8905    }
8906}
8907
8908/// `VREDUCESD_MASKZ`.
8909///
8910/// Supported operand variants:
8911///
8912/// ```text
8913/// +---+--------------------+
8914/// | # | Operands           |
8915/// +---+--------------------+
8916/// | 1 | Xmm, Xmm, Mem, Imm |
8917/// | 2 | Xmm, Xmm, Xmm, Imm |
8918/// +---+--------------------+
8919/// ```
8920pub trait VreducesdMaskzEmitter<A, B, C, D> {
8921    fn vreducesd_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
8922}
8923
8924impl<'a> VreducesdMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8925    fn vreducesd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8926        self.emit(
8927            VREDUCESDRRRI_MASKZ,
8928            op0.as_operand(),
8929            op1.as_operand(),
8930            op2.as_operand(),
8931            op3.as_operand(),
8932        );
8933    }
8934}
8935
8936impl<'a> VreducesdMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
8937    fn vreducesd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
8938        self.emit(
8939            VREDUCESDRRMI_MASKZ,
8940            op0.as_operand(),
8941            op1.as_operand(),
8942            op2.as_operand(),
8943            op3.as_operand(),
8944        );
8945    }
8946}
8947
8948/// `VREDUCESD_MASKZ_SAE`.
8949///
8950/// Supported operand variants:
8951///
8952/// ```text
8953/// +---+--------------------+
8954/// | # | Operands           |
8955/// +---+--------------------+
8956/// | 1 | Xmm, Xmm, Xmm, Imm |
8957/// +---+--------------------+
8958/// ```
8959pub trait VreducesdMaskzSaeEmitter<A, B, C, D> {
8960    fn vreducesd_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
8961}
8962
8963impl<'a> VreducesdMaskzSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8964    fn vreducesd_maskz_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8965        self.emit(
8966            VREDUCESDRRRI_MASKZ_SAE,
8967            op0.as_operand(),
8968            op1.as_operand(),
8969            op2.as_operand(),
8970            op3.as_operand(),
8971        );
8972    }
8973}
8974
8975/// `VREDUCESD_SAE`.
8976///
8977/// Supported operand variants:
8978///
8979/// ```text
8980/// +---+--------------------+
8981/// | # | Operands           |
8982/// +---+--------------------+
8983/// | 1 | Xmm, Xmm, Xmm, Imm |
8984/// +---+--------------------+
8985/// ```
8986pub trait VreducesdSaeEmitter<A, B, C, D> {
8987    fn vreducesd_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
8988}
8989
8990impl<'a> VreducesdSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
8991    fn vreducesd_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
8992        self.emit(
8993            VREDUCESDRRRI_SAE,
8994            op0.as_operand(),
8995            op1.as_operand(),
8996            op2.as_operand(),
8997            op3.as_operand(),
8998        );
8999    }
9000}
9001
9002/// `VREDUCESS`.
9003///
9004/// Supported operand variants:
9005///
9006/// ```text
9007/// +---+--------------------+
9008/// | # | Operands           |
9009/// +---+--------------------+
9010/// | 1 | Xmm, Xmm, Mem, Imm |
9011/// | 2 | Xmm, Xmm, Xmm, Imm |
9012/// +---+--------------------+
9013/// ```
9014pub trait VreducessEmitter<A, B, C, D> {
9015    fn vreducess(&mut self, op0: A, op1: B, op2: C, op3: D);
9016}
9017
9018impl<'a> VreducessEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9019    fn vreducess(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9020        self.emit(
9021            VREDUCESSRRRI,
9022            op0.as_operand(),
9023            op1.as_operand(),
9024            op2.as_operand(),
9025            op3.as_operand(),
9026        );
9027    }
9028}
9029
9030impl<'a> VreducessEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
9031    fn vreducess(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
9032        self.emit(
9033            VREDUCESSRRMI,
9034            op0.as_operand(),
9035            op1.as_operand(),
9036            op2.as_operand(),
9037            op3.as_operand(),
9038        );
9039    }
9040}
9041
9042/// `VREDUCESS_MASK`.
9043///
9044/// Supported operand variants:
9045///
9046/// ```text
9047/// +---+--------------------+
9048/// | # | Operands           |
9049/// +---+--------------------+
9050/// | 1 | Xmm, Xmm, Mem, Imm |
9051/// | 2 | Xmm, Xmm, Xmm, Imm |
9052/// +---+--------------------+
9053/// ```
9054pub trait VreducessMaskEmitter<A, B, C, D> {
9055    fn vreducess_mask(&mut self, op0: A, op1: B, op2: C, op3: D);
9056}
9057
9058impl<'a> VreducessMaskEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9059    fn vreducess_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9060        self.emit(
9061            VREDUCESSRRRI_MASK,
9062            op0.as_operand(),
9063            op1.as_operand(),
9064            op2.as_operand(),
9065            op3.as_operand(),
9066        );
9067    }
9068}
9069
9070impl<'a> VreducessMaskEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
9071    fn vreducess_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
9072        self.emit(
9073            VREDUCESSRRMI_MASK,
9074            op0.as_operand(),
9075            op1.as_operand(),
9076            op2.as_operand(),
9077            op3.as_operand(),
9078        );
9079    }
9080}
9081
9082/// `VREDUCESS_MASK_SAE`.
9083///
9084/// Supported operand variants:
9085///
9086/// ```text
9087/// +---+--------------------+
9088/// | # | Operands           |
9089/// +---+--------------------+
9090/// | 1 | Xmm, Xmm, Xmm, Imm |
9091/// +---+--------------------+
9092/// ```
9093pub trait VreducessMaskSaeEmitter<A, B, C, D> {
9094    fn vreducess_mask_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
9095}
9096
9097impl<'a> VreducessMaskSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9098    fn vreducess_mask_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9099        self.emit(
9100            VREDUCESSRRRI_MASK_SAE,
9101            op0.as_operand(),
9102            op1.as_operand(),
9103            op2.as_operand(),
9104            op3.as_operand(),
9105        );
9106    }
9107}
9108
9109/// `VREDUCESS_MASKZ`.
9110///
9111/// Supported operand variants:
9112///
9113/// ```text
9114/// +---+--------------------+
9115/// | # | Operands           |
9116/// +---+--------------------+
9117/// | 1 | Xmm, Xmm, Mem, Imm |
9118/// | 2 | Xmm, Xmm, Xmm, Imm |
9119/// +---+--------------------+
9120/// ```
9121pub trait VreducessMaskzEmitter<A, B, C, D> {
9122    fn vreducess_maskz(&mut self, op0: A, op1: B, op2: C, op3: D);
9123}
9124
9125impl<'a> VreducessMaskzEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9126    fn vreducess_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9127        self.emit(
9128            VREDUCESSRRRI_MASKZ,
9129            op0.as_operand(),
9130            op1.as_operand(),
9131            op2.as_operand(),
9132            op3.as_operand(),
9133        );
9134    }
9135}
9136
9137impl<'a> VreducessMaskzEmitter<Xmm, Xmm, Mem, Imm> for Assembler<'a> {
9138    fn vreducess_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem, op3: Imm) {
9139        self.emit(
9140            VREDUCESSRRMI_MASKZ,
9141            op0.as_operand(),
9142            op1.as_operand(),
9143            op2.as_operand(),
9144            op3.as_operand(),
9145        );
9146    }
9147}
9148
9149/// `VREDUCESS_MASKZ_SAE`.
9150///
9151/// Supported operand variants:
9152///
9153/// ```text
9154/// +---+--------------------+
9155/// | # | Operands           |
9156/// +---+--------------------+
9157/// | 1 | Xmm, Xmm, Xmm, Imm |
9158/// +---+--------------------+
9159/// ```
9160pub trait VreducessMaskzSaeEmitter<A, B, C, D> {
9161    fn vreducess_maskz_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
9162}
9163
9164impl<'a> VreducessMaskzSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9165    fn vreducess_maskz_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9166        self.emit(
9167            VREDUCESSRRRI_MASKZ_SAE,
9168            op0.as_operand(),
9169            op1.as_operand(),
9170            op2.as_operand(),
9171            op3.as_operand(),
9172        );
9173    }
9174}
9175
9176/// `VREDUCESS_SAE`.
9177///
9178/// Supported operand variants:
9179///
9180/// ```text
9181/// +---+--------------------+
9182/// | # | Operands           |
9183/// +---+--------------------+
9184/// | 1 | Xmm, Xmm, Xmm, Imm |
9185/// +---+--------------------+
9186/// ```
9187pub trait VreducessSaeEmitter<A, B, C, D> {
9188    fn vreducess_sae(&mut self, op0: A, op1: B, op2: C, op3: D);
9189}
9190
9191impl<'a> VreducessSaeEmitter<Xmm, Xmm, Xmm, Imm> for Assembler<'a> {
9192    fn vreducess_sae(&mut self, op0: Xmm, op1: Xmm, op2: Xmm, op3: Imm) {
9193        self.emit(
9194            VREDUCESSRRRI_SAE,
9195            op0.as_operand(),
9196            op1.as_operand(),
9197            op2.as_operand(),
9198            op3.as_operand(),
9199        );
9200    }
9201}
9202
9203/// `VXORPD`.
9204///
9205/// Supported operand variants:
9206///
9207/// ```text
9208/// +---+---------------+
9209/// | # | Operands      |
9210/// +---+---------------+
9211/// | 1 | Xmm, Xmm, Mem |
9212/// | 2 | Xmm, Xmm, Xmm |
9213/// | 3 | Ymm, Ymm, Mem |
9214/// | 4 | Ymm, Ymm, Ymm |
9215/// | 5 | Zmm, Zmm, Mem |
9216/// | 6 | Zmm, Zmm, Zmm |
9217/// +---+---------------+
9218/// ```
9219pub trait VxorpdEmitter<A, B, C> {
9220    fn vxorpd(&mut self, op0: A, op1: B, op2: C);
9221}
9222
9223impl<'a> VxorpdEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9224    fn vxorpd(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9225        self.emit(
9226            VXORPD128RRR,
9227            op0.as_operand(),
9228            op1.as_operand(),
9229            op2.as_operand(),
9230            &NOREG,
9231        );
9232    }
9233}
9234
9235impl<'a> VxorpdEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9236    fn vxorpd(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9237        self.emit(
9238            VXORPD128RRM,
9239            op0.as_operand(),
9240            op1.as_operand(),
9241            op2.as_operand(),
9242            &NOREG,
9243        );
9244    }
9245}
9246
9247impl<'a> VxorpdEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9248    fn vxorpd(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9249        self.emit(
9250            VXORPD256RRR,
9251            op0.as_operand(),
9252            op1.as_operand(),
9253            op2.as_operand(),
9254            &NOREG,
9255        );
9256    }
9257}
9258
9259impl<'a> VxorpdEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9260    fn vxorpd(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9261        self.emit(
9262            VXORPD256RRM,
9263            op0.as_operand(),
9264            op1.as_operand(),
9265            op2.as_operand(),
9266            &NOREG,
9267        );
9268    }
9269}
9270
9271impl<'a> VxorpdEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9272    fn vxorpd(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9273        self.emit(
9274            VXORPD512RRR,
9275            op0.as_operand(),
9276            op1.as_operand(),
9277            op2.as_operand(),
9278            &NOREG,
9279        );
9280    }
9281}
9282
9283impl<'a> VxorpdEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9284    fn vxorpd(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9285        self.emit(
9286            VXORPD512RRM,
9287            op0.as_operand(),
9288            op1.as_operand(),
9289            op2.as_operand(),
9290            &NOREG,
9291        );
9292    }
9293}
9294
9295/// `VXORPD_MASK`.
9296///
9297/// Supported operand variants:
9298///
9299/// ```text
9300/// +---+---------------+
9301/// | # | Operands      |
9302/// +---+---------------+
9303/// | 1 | Xmm, Xmm, Mem |
9304/// | 2 | Xmm, Xmm, Xmm |
9305/// | 3 | Ymm, Ymm, Mem |
9306/// | 4 | Ymm, Ymm, Ymm |
9307/// | 5 | Zmm, Zmm, Mem |
9308/// | 6 | Zmm, Zmm, Zmm |
9309/// +---+---------------+
9310/// ```
9311pub trait VxorpdMaskEmitter<A, B, C> {
9312    fn vxorpd_mask(&mut self, op0: A, op1: B, op2: C);
9313}
9314
9315impl<'a> VxorpdMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9316    fn vxorpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9317        self.emit(
9318            VXORPD128RRR_MASK,
9319            op0.as_operand(),
9320            op1.as_operand(),
9321            op2.as_operand(),
9322            &NOREG,
9323        );
9324    }
9325}
9326
9327impl<'a> VxorpdMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9328    fn vxorpd_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9329        self.emit(
9330            VXORPD128RRM_MASK,
9331            op0.as_operand(),
9332            op1.as_operand(),
9333            op2.as_operand(),
9334            &NOREG,
9335        );
9336    }
9337}
9338
9339impl<'a> VxorpdMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9340    fn vxorpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9341        self.emit(
9342            VXORPD256RRR_MASK,
9343            op0.as_operand(),
9344            op1.as_operand(),
9345            op2.as_operand(),
9346            &NOREG,
9347        );
9348    }
9349}
9350
9351impl<'a> VxorpdMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9352    fn vxorpd_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9353        self.emit(
9354            VXORPD256RRM_MASK,
9355            op0.as_operand(),
9356            op1.as_operand(),
9357            op2.as_operand(),
9358            &NOREG,
9359        );
9360    }
9361}
9362
9363impl<'a> VxorpdMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9364    fn vxorpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9365        self.emit(
9366            VXORPD512RRR_MASK,
9367            op0.as_operand(),
9368            op1.as_operand(),
9369            op2.as_operand(),
9370            &NOREG,
9371        );
9372    }
9373}
9374
9375impl<'a> VxorpdMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9376    fn vxorpd_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9377        self.emit(
9378            VXORPD512RRM_MASK,
9379            op0.as_operand(),
9380            op1.as_operand(),
9381            op2.as_operand(),
9382            &NOREG,
9383        );
9384    }
9385}
9386
9387/// `VXORPD_MASKZ`.
9388///
9389/// Supported operand variants:
9390///
9391/// ```text
9392/// +---+---------------+
9393/// | # | Operands      |
9394/// +---+---------------+
9395/// | 1 | Xmm, Xmm, Mem |
9396/// | 2 | Xmm, Xmm, Xmm |
9397/// | 3 | Ymm, Ymm, Mem |
9398/// | 4 | Ymm, Ymm, Ymm |
9399/// | 5 | Zmm, Zmm, Mem |
9400/// | 6 | Zmm, Zmm, Zmm |
9401/// +---+---------------+
9402/// ```
9403pub trait VxorpdMaskzEmitter<A, B, C> {
9404    fn vxorpd_maskz(&mut self, op0: A, op1: B, op2: C);
9405}
9406
9407impl<'a> VxorpdMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9408    fn vxorpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9409        self.emit(
9410            VXORPD128RRR_MASKZ,
9411            op0.as_operand(),
9412            op1.as_operand(),
9413            op2.as_operand(),
9414            &NOREG,
9415        );
9416    }
9417}
9418
9419impl<'a> VxorpdMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9420    fn vxorpd_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9421        self.emit(
9422            VXORPD128RRM_MASKZ,
9423            op0.as_operand(),
9424            op1.as_operand(),
9425            op2.as_operand(),
9426            &NOREG,
9427        );
9428    }
9429}
9430
9431impl<'a> VxorpdMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9432    fn vxorpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9433        self.emit(
9434            VXORPD256RRR_MASKZ,
9435            op0.as_operand(),
9436            op1.as_operand(),
9437            op2.as_operand(),
9438            &NOREG,
9439        );
9440    }
9441}
9442
9443impl<'a> VxorpdMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9444    fn vxorpd_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9445        self.emit(
9446            VXORPD256RRM_MASKZ,
9447            op0.as_operand(),
9448            op1.as_operand(),
9449            op2.as_operand(),
9450            &NOREG,
9451        );
9452    }
9453}
9454
9455impl<'a> VxorpdMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9456    fn vxorpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9457        self.emit(
9458            VXORPD512RRR_MASKZ,
9459            op0.as_operand(),
9460            op1.as_operand(),
9461            op2.as_operand(),
9462            &NOREG,
9463        );
9464    }
9465}
9466
9467impl<'a> VxorpdMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9468    fn vxorpd_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9469        self.emit(
9470            VXORPD512RRM_MASKZ,
9471            op0.as_operand(),
9472            op1.as_operand(),
9473            op2.as_operand(),
9474            &NOREG,
9475        );
9476    }
9477}
9478
9479/// `VXORPS`.
9480///
9481/// Supported operand variants:
9482///
9483/// ```text
9484/// +---+---------------+
9485/// | # | Operands      |
9486/// +---+---------------+
9487/// | 1 | Xmm, Xmm, Mem |
9488/// | 2 | Xmm, Xmm, Xmm |
9489/// | 3 | Ymm, Ymm, Mem |
9490/// | 4 | Ymm, Ymm, Ymm |
9491/// | 5 | Zmm, Zmm, Mem |
9492/// | 6 | Zmm, Zmm, Zmm |
9493/// +---+---------------+
9494/// ```
9495pub trait VxorpsEmitter<A, B, C> {
9496    fn vxorps(&mut self, op0: A, op1: B, op2: C);
9497}
9498
9499impl<'a> VxorpsEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9500    fn vxorps(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9501        self.emit(
9502            VXORPS128RRR,
9503            op0.as_operand(),
9504            op1.as_operand(),
9505            op2.as_operand(),
9506            &NOREG,
9507        );
9508    }
9509}
9510
9511impl<'a> VxorpsEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9512    fn vxorps(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9513        self.emit(
9514            VXORPS128RRM,
9515            op0.as_operand(),
9516            op1.as_operand(),
9517            op2.as_operand(),
9518            &NOREG,
9519        );
9520    }
9521}
9522
9523impl<'a> VxorpsEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9524    fn vxorps(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9525        self.emit(
9526            VXORPS256RRR,
9527            op0.as_operand(),
9528            op1.as_operand(),
9529            op2.as_operand(),
9530            &NOREG,
9531        );
9532    }
9533}
9534
9535impl<'a> VxorpsEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9536    fn vxorps(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9537        self.emit(
9538            VXORPS256RRM,
9539            op0.as_operand(),
9540            op1.as_operand(),
9541            op2.as_operand(),
9542            &NOREG,
9543        );
9544    }
9545}
9546
9547impl<'a> VxorpsEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9548    fn vxorps(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9549        self.emit(
9550            VXORPS512RRR,
9551            op0.as_operand(),
9552            op1.as_operand(),
9553            op2.as_operand(),
9554            &NOREG,
9555        );
9556    }
9557}
9558
9559impl<'a> VxorpsEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9560    fn vxorps(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9561        self.emit(
9562            VXORPS512RRM,
9563            op0.as_operand(),
9564            op1.as_operand(),
9565            op2.as_operand(),
9566            &NOREG,
9567        );
9568    }
9569}
9570
9571/// `VXORPS_MASK`.
9572///
9573/// Supported operand variants:
9574///
9575/// ```text
9576/// +---+---------------+
9577/// | # | Operands      |
9578/// +---+---------------+
9579/// | 1 | Xmm, Xmm, Mem |
9580/// | 2 | Xmm, Xmm, Xmm |
9581/// | 3 | Ymm, Ymm, Mem |
9582/// | 4 | Ymm, Ymm, Ymm |
9583/// | 5 | Zmm, Zmm, Mem |
9584/// | 6 | Zmm, Zmm, Zmm |
9585/// +---+---------------+
9586/// ```
9587pub trait VxorpsMaskEmitter<A, B, C> {
9588    fn vxorps_mask(&mut self, op0: A, op1: B, op2: C);
9589}
9590
9591impl<'a> VxorpsMaskEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9592    fn vxorps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9593        self.emit(
9594            VXORPS128RRR_MASK,
9595            op0.as_operand(),
9596            op1.as_operand(),
9597            op2.as_operand(),
9598            &NOREG,
9599        );
9600    }
9601}
9602
9603impl<'a> VxorpsMaskEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9604    fn vxorps_mask(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9605        self.emit(
9606            VXORPS128RRM_MASK,
9607            op0.as_operand(),
9608            op1.as_operand(),
9609            op2.as_operand(),
9610            &NOREG,
9611        );
9612    }
9613}
9614
9615impl<'a> VxorpsMaskEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9616    fn vxorps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9617        self.emit(
9618            VXORPS256RRR_MASK,
9619            op0.as_operand(),
9620            op1.as_operand(),
9621            op2.as_operand(),
9622            &NOREG,
9623        );
9624    }
9625}
9626
9627impl<'a> VxorpsMaskEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9628    fn vxorps_mask(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9629        self.emit(
9630            VXORPS256RRM_MASK,
9631            op0.as_operand(),
9632            op1.as_operand(),
9633            op2.as_operand(),
9634            &NOREG,
9635        );
9636    }
9637}
9638
9639impl<'a> VxorpsMaskEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9640    fn vxorps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9641        self.emit(
9642            VXORPS512RRR_MASK,
9643            op0.as_operand(),
9644            op1.as_operand(),
9645            op2.as_operand(),
9646            &NOREG,
9647        );
9648    }
9649}
9650
9651impl<'a> VxorpsMaskEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9652    fn vxorps_mask(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9653        self.emit(
9654            VXORPS512RRM_MASK,
9655            op0.as_operand(),
9656            op1.as_operand(),
9657            op2.as_operand(),
9658            &NOREG,
9659        );
9660    }
9661}
9662
9663/// `VXORPS_MASKZ`.
9664///
9665/// Supported operand variants:
9666///
9667/// ```text
9668/// +---+---------------+
9669/// | # | Operands      |
9670/// +---+---------------+
9671/// | 1 | Xmm, Xmm, Mem |
9672/// | 2 | Xmm, Xmm, Xmm |
9673/// | 3 | Ymm, Ymm, Mem |
9674/// | 4 | Ymm, Ymm, Ymm |
9675/// | 5 | Zmm, Zmm, Mem |
9676/// | 6 | Zmm, Zmm, Zmm |
9677/// +---+---------------+
9678/// ```
9679pub trait VxorpsMaskzEmitter<A, B, C> {
9680    fn vxorps_maskz(&mut self, op0: A, op1: B, op2: C);
9681}
9682
9683impl<'a> VxorpsMaskzEmitter<Xmm, Xmm, Xmm> for Assembler<'a> {
9684    fn vxorps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Xmm) {
9685        self.emit(
9686            VXORPS128RRR_MASKZ,
9687            op0.as_operand(),
9688            op1.as_operand(),
9689            op2.as_operand(),
9690            &NOREG,
9691        );
9692    }
9693}
9694
9695impl<'a> VxorpsMaskzEmitter<Xmm, Xmm, Mem> for Assembler<'a> {
9696    fn vxorps_maskz(&mut self, op0: Xmm, op1: Xmm, op2: Mem) {
9697        self.emit(
9698            VXORPS128RRM_MASKZ,
9699            op0.as_operand(),
9700            op1.as_operand(),
9701            op2.as_operand(),
9702            &NOREG,
9703        );
9704    }
9705}
9706
9707impl<'a> VxorpsMaskzEmitter<Ymm, Ymm, Ymm> for Assembler<'a> {
9708    fn vxorps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Ymm) {
9709        self.emit(
9710            VXORPS256RRR_MASKZ,
9711            op0.as_operand(),
9712            op1.as_operand(),
9713            op2.as_operand(),
9714            &NOREG,
9715        );
9716    }
9717}
9718
9719impl<'a> VxorpsMaskzEmitter<Ymm, Ymm, Mem> for Assembler<'a> {
9720    fn vxorps_maskz(&mut self, op0: Ymm, op1: Ymm, op2: Mem) {
9721        self.emit(
9722            VXORPS256RRM_MASKZ,
9723            op0.as_operand(),
9724            op1.as_operand(),
9725            op2.as_operand(),
9726            &NOREG,
9727        );
9728    }
9729}
9730
9731impl<'a> VxorpsMaskzEmitter<Zmm, Zmm, Zmm> for Assembler<'a> {
9732    fn vxorps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Zmm) {
9733        self.emit(
9734            VXORPS512RRR_MASKZ,
9735            op0.as_operand(),
9736            op1.as_operand(),
9737            op2.as_operand(),
9738            &NOREG,
9739        );
9740    }
9741}
9742
9743impl<'a> VxorpsMaskzEmitter<Zmm, Zmm, Mem> for Assembler<'a> {
9744    fn vxorps_maskz(&mut self, op0: Zmm, op1: Zmm, op2: Mem) {
9745        self.emit(
9746            VXORPS512RRM_MASKZ,
9747            op0.as_operand(),
9748            op1.as_operand(),
9749            op2.as_operand(),
9750            &NOREG,
9751        );
9752    }
9753}
9754
9755impl<'a> Assembler<'a> {
9756    /// `KADDB`.
9757    ///
9758    /// Supported operand variants:
9759    ///
9760    /// ```text
9761    /// +---+------------------+
9762    /// | # | Operands         |
9763    /// +---+------------------+
9764    /// | 1 | KReg, KReg, KReg |
9765    /// +---+------------------+
9766    /// ```
9767    #[inline]
9768    pub fn kaddb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9769    where
9770        Assembler<'a>: KaddbEmitter<A, B, C>,
9771    {
9772        <Self as KaddbEmitter<A, B, C>>::kaddb(self, op0, op1, op2);
9773    }
9774    /// `KADDW`.
9775    ///
9776    /// Supported operand variants:
9777    ///
9778    /// ```text
9779    /// +---+------------------+
9780    /// | # | Operands         |
9781    /// +---+------------------+
9782    /// | 1 | KReg, KReg, KReg |
9783    /// +---+------------------+
9784    /// ```
9785    #[inline]
9786    pub fn kaddw<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9787    where
9788        Assembler<'a>: KaddwEmitter<A, B, C>,
9789    {
9790        <Self as KaddwEmitter<A, B, C>>::kaddw(self, op0, op1, op2);
9791    }
9792    /// `KANDB`.
9793    ///
9794    /// Supported operand variants:
9795    ///
9796    /// ```text
9797    /// +---+------------------+
9798    /// | # | Operands         |
9799    /// +---+------------------+
9800    /// | 1 | KReg, KReg, KReg |
9801    /// +---+------------------+
9802    /// ```
9803    #[inline]
9804    pub fn kandb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9805    where
9806        Assembler<'a>: KandbEmitter<A, B, C>,
9807    {
9808        <Self as KandbEmitter<A, B, C>>::kandb(self, op0, op1, op2);
9809    }
9810    /// `KANDNB`.
9811    ///
9812    /// Supported operand variants:
9813    ///
9814    /// ```text
9815    /// +---+------------------+
9816    /// | # | Operands         |
9817    /// +---+------------------+
9818    /// | 1 | KReg, KReg, KReg |
9819    /// +---+------------------+
9820    /// ```
9821    #[inline]
9822    pub fn kandnb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9823    where
9824        Assembler<'a>: KandnbEmitter<A, B, C>,
9825    {
9826        <Self as KandnbEmitter<A, B, C>>::kandnb(self, op0, op1, op2);
9827    }
9828    /// `KMOVB`.
9829    ///
9830    /// Supported operand variants:
9831    ///
9832    /// ```text
9833    /// +---+------------+
9834    /// | # | Operands   |
9835    /// +---+------------+
9836    /// | 1 | Gpd, KReg  |
9837    /// | 2 | KReg, Gpd  |
9838    /// | 3 | KReg, KReg |
9839    /// | 4 | KReg, Mem  |
9840    /// | 5 | Mem, KReg  |
9841    /// +---+------------+
9842    /// ```
9843    #[inline]
9844    pub fn kmovb<A, B>(&mut self, op0: A, op1: B)
9845    where
9846        Assembler<'a>: KmovbEmitter<A, B>,
9847    {
9848        <Self as KmovbEmitter<A, B>>::kmovb(self, op0, op1);
9849    }
9850    /// `KNOTB`.
9851    ///
9852    /// Supported operand variants:
9853    ///
9854    /// ```text
9855    /// +---+------------+
9856    /// | # | Operands   |
9857    /// +---+------------+
9858    /// | 1 | KReg, KReg |
9859    /// +---+------------+
9860    /// ```
9861    #[inline]
9862    pub fn knotb<A, B>(&mut self, op0: A, op1: B)
9863    where
9864        Assembler<'a>: KnotbEmitter<A, B>,
9865    {
9866        <Self as KnotbEmitter<A, B>>::knotb(self, op0, op1);
9867    }
9868    /// `KORB`.
9869    ///
9870    /// Supported operand variants:
9871    ///
9872    /// ```text
9873    /// +---+------------------+
9874    /// | # | Operands         |
9875    /// +---+------------------+
9876    /// | 1 | KReg, KReg, KReg |
9877    /// +---+------------------+
9878    /// ```
9879    #[inline]
9880    pub fn korb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9881    where
9882        Assembler<'a>: KorbEmitter<A, B, C>,
9883    {
9884        <Self as KorbEmitter<A, B, C>>::korb(self, op0, op1, op2);
9885    }
9886    /// `KORTESTB`.
9887    ///
9888    /// Supported operand variants:
9889    ///
9890    /// ```text
9891    /// +---+------------+
9892    /// | # | Operands   |
9893    /// +---+------------+
9894    /// | 1 | KReg, KReg |
9895    /// +---+------------+
9896    /// ```
9897    #[inline]
9898    pub fn kortestb<A, B>(&mut self, op0: A, op1: B)
9899    where
9900        Assembler<'a>: KortestbEmitter<A, B>,
9901    {
9902        <Self as KortestbEmitter<A, B>>::kortestb(self, op0, op1);
9903    }
9904    /// `KSHIFTLB`.
9905    ///
9906    /// Supported operand variants:
9907    ///
9908    /// ```text
9909    /// +---+-----------------+
9910    /// | # | Operands        |
9911    /// +---+-----------------+
9912    /// | 1 | KReg, KReg, Imm |
9913    /// +---+-----------------+
9914    /// ```
9915    #[inline]
9916    pub fn kshiftlb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9917    where
9918        Assembler<'a>: KshiftlbEmitter<A, B, C>,
9919    {
9920        <Self as KshiftlbEmitter<A, B, C>>::kshiftlb(self, op0, op1, op2);
9921    }
9922    /// `KSHIFTRB`.
9923    ///
9924    /// Supported operand variants:
9925    ///
9926    /// ```text
9927    /// +---+-----------------+
9928    /// | # | Operands        |
9929    /// +---+-----------------+
9930    /// | 1 | KReg, KReg, Imm |
9931    /// +---+-----------------+
9932    /// ```
9933    #[inline]
9934    pub fn kshiftrb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9935    where
9936        Assembler<'a>: KshiftrbEmitter<A, B, C>,
9937    {
9938        <Self as KshiftrbEmitter<A, B, C>>::kshiftrb(self, op0, op1, op2);
9939    }
9940    /// `KTESTB`.
9941    ///
9942    /// Supported operand variants:
9943    ///
9944    /// ```text
9945    /// +---+------------+
9946    /// | # | Operands   |
9947    /// +---+------------+
9948    /// | 1 | KReg, KReg |
9949    /// +---+------------+
9950    /// ```
9951    #[inline]
9952    pub fn ktestb<A, B>(&mut self, op0: A, op1: B)
9953    where
9954        Assembler<'a>: KtestbEmitter<A, B>,
9955    {
9956        <Self as KtestbEmitter<A, B>>::ktestb(self, op0, op1);
9957    }
9958    /// `KTESTW`.
9959    ///
9960    /// Supported operand variants:
9961    ///
9962    /// ```text
9963    /// +---+------------+
9964    /// | # | Operands   |
9965    /// +---+------------+
9966    /// | 1 | KReg, KReg |
9967    /// +---+------------+
9968    /// ```
9969    #[inline]
9970    pub fn ktestw<A, B>(&mut self, op0: A, op1: B)
9971    where
9972        Assembler<'a>: KtestwEmitter<A, B>,
9973    {
9974        <Self as KtestwEmitter<A, B>>::ktestw(self, op0, op1);
9975    }
9976    /// `KXNORB`.
9977    ///
9978    /// Supported operand variants:
9979    ///
9980    /// ```text
9981    /// +---+------------------+
9982    /// | # | Operands         |
9983    /// +---+------------------+
9984    /// | 1 | KReg, KReg, KReg |
9985    /// +---+------------------+
9986    /// ```
9987    #[inline]
9988    pub fn kxnorb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
9989    where
9990        Assembler<'a>: KxnorbEmitter<A, B, C>,
9991    {
9992        <Self as KxnorbEmitter<A, B, C>>::kxnorb(self, op0, op1, op2);
9993    }
9994    /// `KXORB`.
9995    ///
9996    /// Supported operand variants:
9997    ///
9998    /// ```text
9999    /// +---+------------------+
10000    /// | # | Operands         |
10001    /// +---+------------------+
10002    /// | 1 | KReg, KReg, KReg |
10003    /// +---+------------------+
10004    /// ```
10005    #[inline]
10006    pub fn kxorb<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10007    where
10008        Assembler<'a>: KxorbEmitter<A, B, C>,
10009    {
10010        <Self as KxorbEmitter<A, B, C>>::kxorb(self, op0, op1, op2);
10011    }
10012    /// `VANDNPD`.
10013    ///
10014    /// Supported operand variants:
10015    ///
10016    /// ```text
10017    /// +---+---------------+
10018    /// | # | Operands      |
10019    /// +---+---------------+
10020    /// | 1 | Xmm, Xmm, Mem |
10021    /// | 2 | Xmm, Xmm, Xmm |
10022    /// | 3 | Ymm, Ymm, Mem |
10023    /// | 4 | Ymm, Ymm, Ymm |
10024    /// | 5 | Zmm, Zmm, Mem |
10025    /// | 6 | Zmm, Zmm, Zmm |
10026    /// +---+---------------+
10027    /// ```
10028    #[inline]
10029    pub fn vandnpd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10030    where
10031        Assembler<'a>: VandnpdEmitter<A, B, C>,
10032    {
10033        <Self as VandnpdEmitter<A, B, C>>::vandnpd(self, op0, op1, op2);
10034    }
10035    /// `VANDNPD_MASK`.
10036    ///
10037    /// Supported operand variants:
10038    ///
10039    /// ```text
10040    /// +---+---------------+
10041    /// | # | Operands      |
10042    /// +---+---------------+
10043    /// | 1 | Xmm, Xmm, Mem |
10044    /// | 2 | Xmm, Xmm, Xmm |
10045    /// | 3 | Ymm, Ymm, Mem |
10046    /// | 4 | Ymm, Ymm, Ymm |
10047    /// | 5 | Zmm, Zmm, Mem |
10048    /// | 6 | Zmm, Zmm, Zmm |
10049    /// +---+---------------+
10050    /// ```
10051    #[inline]
10052    pub fn vandnpd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10053    where
10054        Assembler<'a>: VandnpdMaskEmitter<A, B, C>,
10055    {
10056        <Self as VandnpdMaskEmitter<A, B, C>>::vandnpd_mask(self, op0, op1, op2);
10057    }
10058    /// `VANDNPD_MASKZ`.
10059    ///
10060    /// Supported operand variants:
10061    ///
10062    /// ```text
10063    /// +---+---------------+
10064    /// | # | Operands      |
10065    /// +---+---------------+
10066    /// | 1 | Xmm, Xmm, Mem |
10067    /// | 2 | Xmm, Xmm, Xmm |
10068    /// | 3 | Ymm, Ymm, Mem |
10069    /// | 4 | Ymm, Ymm, Ymm |
10070    /// | 5 | Zmm, Zmm, Mem |
10071    /// | 6 | Zmm, Zmm, Zmm |
10072    /// +---+---------------+
10073    /// ```
10074    #[inline]
10075    pub fn vandnpd_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10076    where
10077        Assembler<'a>: VandnpdMaskzEmitter<A, B, C>,
10078    {
10079        <Self as VandnpdMaskzEmitter<A, B, C>>::vandnpd_maskz(self, op0, op1, op2);
10080    }
10081    /// `VANDNPS`.
10082    ///
10083    /// Supported operand variants:
10084    ///
10085    /// ```text
10086    /// +---+---------------+
10087    /// | # | Operands      |
10088    /// +---+---------------+
10089    /// | 1 | Xmm, Xmm, Mem |
10090    /// | 2 | Xmm, Xmm, Xmm |
10091    /// | 3 | Ymm, Ymm, Mem |
10092    /// | 4 | Ymm, Ymm, Ymm |
10093    /// | 5 | Zmm, Zmm, Mem |
10094    /// | 6 | Zmm, Zmm, Zmm |
10095    /// +---+---------------+
10096    /// ```
10097    #[inline]
10098    pub fn vandnps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10099    where
10100        Assembler<'a>: VandnpsEmitter<A, B, C>,
10101    {
10102        <Self as VandnpsEmitter<A, B, C>>::vandnps(self, op0, op1, op2);
10103    }
10104    /// `VANDNPS_MASK`.
10105    ///
10106    /// Supported operand variants:
10107    ///
10108    /// ```text
10109    /// +---+---------------+
10110    /// | # | Operands      |
10111    /// +---+---------------+
10112    /// | 1 | Xmm, Xmm, Mem |
10113    /// | 2 | Xmm, Xmm, Xmm |
10114    /// | 3 | Ymm, Ymm, Mem |
10115    /// | 4 | Ymm, Ymm, Ymm |
10116    /// | 5 | Zmm, Zmm, Mem |
10117    /// | 6 | Zmm, Zmm, Zmm |
10118    /// +---+---------------+
10119    /// ```
10120    #[inline]
10121    pub fn vandnps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10122    where
10123        Assembler<'a>: VandnpsMaskEmitter<A, B, C>,
10124    {
10125        <Self as VandnpsMaskEmitter<A, B, C>>::vandnps_mask(self, op0, op1, op2);
10126    }
10127    /// `VANDNPS_MASKZ`.
10128    ///
10129    /// Supported operand variants:
10130    ///
10131    /// ```text
10132    /// +---+---------------+
10133    /// | # | Operands      |
10134    /// +---+---------------+
10135    /// | 1 | Xmm, Xmm, Mem |
10136    /// | 2 | Xmm, Xmm, Xmm |
10137    /// | 3 | Ymm, Ymm, Mem |
10138    /// | 4 | Ymm, Ymm, Ymm |
10139    /// | 5 | Zmm, Zmm, Mem |
10140    /// | 6 | Zmm, Zmm, Zmm |
10141    /// +---+---------------+
10142    /// ```
10143    #[inline]
10144    pub fn vandnps_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10145    where
10146        Assembler<'a>: VandnpsMaskzEmitter<A, B, C>,
10147    {
10148        <Self as VandnpsMaskzEmitter<A, B, C>>::vandnps_maskz(self, op0, op1, op2);
10149    }
10150    /// `VANDPD`.
10151    ///
10152    /// Supported operand variants:
10153    ///
10154    /// ```text
10155    /// +---+---------------+
10156    /// | # | Operands      |
10157    /// +---+---------------+
10158    /// | 1 | Xmm, Xmm, Mem |
10159    /// | 2 | Xmm, Xmm, Xmm |
10160    /// | 3 | Ymm, Ymm, Mem |
10161    /// | 4 | Ymm, Ymm, Ymm |
10162    /// | 5 | Zmm, Zmm, Mem |
10163    /// | 6 | Zmm, Zmm, Zmm |
10164    /// +---+---------------+
10165    /// ```
10166    #[inline]
10167    pub fn vandpd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10168    where
10169        Assembler<'a>: VandpdEmitter<A, B, C>,
10170    {
10171        <Self as VandpdEmitter<A, B, C>>::vandpd(self, op0, op1, op2);
10172    }
10173    /// `VANDPD_MASK`.
10174    ///
10175    /// Supported operand variants:
10176    ///
10177    /// ```text
10178    /// +---+---------------+
10179    /// | # | Operands      |
10180    /// +---+---------------+
10181    /// | 1 | Xmm, Xmm, Mem |
10182    /// | 2 | Xmm, Xmm, Xmm |
10183    /// | 3 | Ymm, Ymm, Mem |
10184    /// | 4 | Ymm, Ymm, Ymm |
10185    /// | 5 | Zmm, Zmm, Mem |
10186    /// | 6 | Zmm, Zmm, Zmm |
10187    /// +---+---------------+
10188    /// ```
10189    #[inline]
10190    pub fn vandpd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10191    where
10192        Assembler<'a>: VandpdMaskEmitter<A, B, C>,
10193    {
10194        <Self as VandpdMaskEmitter<A, B, C>>::vandpd_mask(self, op0, op1, op2);
10195    }
10196    /// `VANDPD_MASKZ`.
10197    ///
10198    /// Supported operand variants:
10199    ///
10200    /// ```text
10201    /// +---+---------------+
10202    /// | # | Operands      |
10203    /// +---+---------------+
10204    /// | 1 | Xmm, Xmm, Mem |
10205    /// | 2 | Xmm, Xmm, Xmm |
10206    /// | 3 | Ymm, Ymm, Mem |
10207    /// | 4 | Ymm, Ymm, Ymm |
10208    /// | 5 | Zmm, Zmm, Mem |
10209    /// | 6 | Zmm, Zmm, Zmm |
10210    /// +---+---------------+
10211    /// ```
10212    #[inline]
10213    pub fn vandpd_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10214    where
10215        Assembler<'a>: VandpdMaskzEmitter<A, B, C>,
10216    {
10217        <Self as VandpdMaskzEmitter<A, B, C>>::vandpd_maskz(self, op0, op1, op2);
10218    }
10219    /// `VANDPS`.
10220    ///
10221    /// Supported operand variants:
10222    ///
10223    /// ```text
10224    /// +---+---------------+
10225    /// | # | Operands      |
10226    /// +---+---------------+
10227    /// | 1 | Xmm, Xmm, Mem |
10228    /// | 2 | Xmm, Xmm, Xmm |
10229    /// | 3 | Ymm, Ymm, Mem |
10230    /// | 4 | Ymm, Ymm, Ymm |
10231    /// | 5 | Zmm, Zmm, Mem |
10232    /// | 6 | Zmm, Zmm, Zmm |
10233    /// +---+---------------+
10234    /// ```
10235    #[inline]
10236    pub fn vandps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10237    where
10238        Assembler<'a>: VandpsEmitter<A, B, C>,
10239    {
10240        <Self as VandpsEmitter<A, B, C>>::vandps(self, op0, op1, op2);
10241    }
10242    /// `VANDPS_MASK`.
10243    ///
10244    /// Supported operand variants:
10245    ///
10246    /// ```text
10247    /// +---+---------------+
10248    /// | # | Operands      |
10249    /// +---+---------------+
10250    /// | 1 | Xmm, Xmm, Mem |
10251    /// | 2 | Xmm, Xmm, Xmm |
10252    /// | 3 | Ymm, Ymm, Mem |
10253    /// | 4 | Ymm, Ymm, Ymm |
10254    /// | 5 | Zmm, Zmm, Mem |
10255    /// | 6 | Zmm, Zmm, Zmm |
10256    /// +---+---------------+
10257    /// ```
10258    #[inline]
10259    pub fn vandps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10260    where
10261        Assembler<'a>: VandpsMaskEmitter<A, B, C>,
10262    {
10263        <Self as VandpsMaskEmitter<A, B, C>>::vandps_mask(self, op0, op1, op2);
10264    }
10265    /// `VANDPS_MASKZ`.
10266    ///
10267    /// Supported operand variants:
10268    ///
10269    /// ```text
10270    /// +---+---------------+
10271    /// | # | Operands      |
10272    /// +---+---------------+
10273    /// | 1 | Xmm, Xmm, Mem |
10274    /// | 2 | Xmm, Xmm, Xmm |
10275    /// | 3 | Ymm, Ymm, Mem |
10276    /// | 4 | Ymm, Ymm, Ymm |
10277    /// | 5 | Zmm, Zmm, Mem |
10278    /// | 6 | Zmm, Zmm, Zmm |
10279    /// +---+---------------+
10280    /// ```
10281    #[inline]
10282    pub fn vandps_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
10283    where
10284        Assembler<'a>: VandpsMaskzEmitter<A, B, C>,
10285    {
10286        <Self as VandpsMaskzEmitter<A, B, C>>::vandps_maskz(self, op0, op1, op2);
10287    }
10288    /// `VBROADCASTF32X2`.
10289    ///
10290    /// Supported operand variants:
10291    ///
10292    /// ```text
10293    /// +---+----------+
10294    /// | # | Operands |
10295    /// +---+----------+
10296    /// | 1 | Ymm, Mem |
10297    /// | 2 | Ymm, Xmm |
10298    /// | 3 | Zmm, Mem |
10299    /// | 4 | Zmm, Xmm |
10300    /// +---+----------+
10301    /// ```
10302    #[inline]
10303    pub fn vbroadcastf32x2<A, B>(&mut self, op0: A, op1: B)
10304    where
10305        Assembler<'a>: Vbroadcastf32x2Emitter<A, B>,
10306    {
10307        <Self as Vbroadcastf32x2Emitter<A, B>>::vbroadcastf32x2(self, op0, op1);
10308    }
10309    /// `VBROADCASTF32X2_MASK`.
10310    ///
10311    /// Supported operand variants:
10312    ///
10313    /// ```text
10314    /// +---+----------+
10315    /// | # | Operands |
10316    /// +---+----------+
10317    /// | 1 | Ymm, Mem |
10318    /// | 2 | Ymm, Xmm |
10319    /// | 3 | Zmm, Mem |
10320    /// | 4 | Zmm, Xmm |
10321    /// +---+----------+
10322    /// ```
10323    #[inline]
10324    pub fn vbroadcastf32x2_mask<A, B>(&mut self, op0: A, op1: B)
10325    where
10326        Assembler<'a>: Vbroadcastf32x2MaskEmitter<A, B>,
10327    {
10328        <Self as Vbroadcastf32x2MaskEmitter<A, B>>::vbroadcastf32x2_mask(self, op0, op1);
10329    }
10330    /// `VBROADCASTF32X2_MASKZ`.
10331    ///
10332    /// Supported operand variants:
10333    ///
10334    /// ```text
10335    /// +---+----------+
10336    /// | # | Operands |
10337    /// +---+----------+
10338    /// | 1 | Ymm, Mem |
10339    /// | 2 | Ymm, Xmm |
10340    /// | 3 | Zmm, Mem |
10341    /// | 4 | Zmm, Xmm |
10342    /// +---+----------+
10343    /// ```
10344    #[inline]
10345    pub fn vbroadcastf32x2_maskz<A, B>(&mut self, op0: A, op1: B)
10346    where
10347        Assembler<'a>: Vbroadcastf32x2MaskzEmitter<A, B>,
10348    {
10349        <Self as Vbroadcastf32x2MaskzEmitter<A, B>>::vbroadcastf32x2_maskz(self, op0, op1);
10350    }
10351    /// `VBROADCASTF32X8`.
10352    ///
10353    /// Supported operand variants:
10354    ///
10355    /// ```text
10356    /// +---+----------+
10357    /// | # | Operands |
10358    /// +---+----------+
10359    /// | 1 | Zmm, Mem |
10360    /// +---+----------+
10361    /// ```
10362    #[inline]
10363    pub fn vbroadcastf32x8<A, B>(&mut self, op0: A, op1: B)
10364    where
10365        Assembler<'a>: Vbroadcastf32x8Emitter<A, B>,
10366    {
10367        <Self as Vbroadcastf32x8Emitter<A, B>>::vbroadcastf32x8(self, op0, op1);
10368    }
10369    /// `VBROADCASTF32X8_MASK`.
10370    ///
10371    /// Supported operand variants:
10372    ///
10373    /// ```text
10374    /// +---+----------+
10375    /// | # | Operands |
10376    /// +---+----------+
10377    /// | 1 | Zmm, Mem |
10378    /// +---+----------+
10379    /// ```
10380    #[inline]
10381    pub fn vbroadcastf32x8_mask<A, B>(&mut self, op0: A, op1: B)
10382    where
10383        Assembler<'a>: Vbroadcastf32x8MaskEmitter<A, B>,
10384    {
10385        <Self as Vbroadcastf32x8MaskEmitter<A, B>>::vbroadcastf32x8_mask(self, op0, op1);
10386    }
10387    /// `VBROADCASTF32X8_MASKZ`.
10388    ///
10389    /// Supported operand variants:
10390    ///
10391    /// ```text
10392    /// +---+----------+
10393    /// | # | Operands |
10394    /// +---+----------+
10395    /// | 1 | Zmm, Mem |
10396    /// +---+----------+
10397    /// ```
10398    #[inline]
10399    pub fn vbroadcastf32x8_maskz<A, B>(&mut self, op0: A, op1: B)
10400    where
10401        Assembler<'a>: Vbroadcastf32x8MaskzEmitter<A, B>,
10402    {
10403        <Self as Vbroadcastf32x8MaskzEmitter<A, B>>::vbroadcastf32x8_maskz(self, op0, op1);
10404    }
10405    /// `VBROADCASTF64X2`.
10406    ///
10407    /// Supported operand variants:
10408    ///
10409    /// ```text
10410    /// +---+----------+
10411    /// | # | Operands |
10412    /// +---+----------+
10413    /// | 1 | Ymm, Mem |
10414    /// | 2 | Zmm, Mem |
10415    /// +---+----------+
10416    /// ```
10417    #[inline]
10418    pub fn vbroadcastf64x2<A, B>(&mut self, op0: A, op1: B)
10419    where
10420        Assembler<'a>: Vbroadcastf64x2Emitter<A, B>,
10421    {
10422        <Self as Vbroadcastf64x2Emitter<A, B>>::vbroadcastf64x2(self, op0, op1);
10423    }
10424    /// `VBROADCASTF64X2_MASK`.
10425    ///
10426    /// Supported operand variants:
10427    ///
10428    /// ```text
10429    /// +---+----------+
10430    /// | # | Operands |
10431    /// +---+----------+
10432    /// | 1 | Ymm, Mem |
10433    /// | 2 | Zmm, Mem |
10434    /// +---+----------+
10435    /// ```
10436    #[inline]
10437    pub fn vbroadcastf64x2_mask<A, B>(&mut self, op0: A, op1: B)
10438    where
10439        Assembler<'a>: Vbroadcastf64x2MaskEmitter<A, B>,
10440    {
10441        <Self as Vbroadcastf64x2MaskEmitter<A, B>>::vbroadcastf64x2_mask(self, op0, op1);
10442    }
10443    /// `VBROADCASTF64X2_MASKZ`.
10444    ///
10445    /// Supported operand variants:
10446    ///
10447    /// ```text
10448    /// +---+----------+
10449    /// | # | Operands |
10450    /// +---+----------+
10451    /// | 1 | Ymm, Mem |
10452    /// | 2 | Zmm, Mem |
10453    /// +---+----------+
10454    /// ```
10455    #[inline]
10456    pub fn vbroadcastf64x2_maskz<A, B>(&mut self, op0: A, op1: B)
10457    where
10458        Assembler<'a>: Vbroadcastf64x2MaskzEmitter<A, B>,
10459    {
10460        <Self as Vbroadcastf64x2MaskzEmitter<A, B>>::vbroadcastf64x2_maskz(self, op0, op1);
10461    }
10462    /// `VBROADCASTI32X2`.
10463    ///
10464    /// Supported operand variants:
10465    ///
10466    /// ```text
10467    /// +---+----------+
10468    /// | # | Operands |
10469    /// +---+----------+
10470    /// | 1 | Xmm, Mem |
10471    /// | 2 | Xmm, Xmm |
10472    /// | 3 | Ymm, Mem |
10473    /// | 4 | Ymm, Xmm |
10474    /// | 5 | Zmm, Mem |
10475    /// | 6 | Zmm, Xmm |
10476    /// +---+----------+
10477    /// ```
10478    #[inline]
10479    pub fn vbroadcasti32x2<A, B>(&mut self, op0: A, op1: B)
10480    where
10481        Assembler<'a>: Vbroadcasti32x2Emitter<A, B>,
10482    {
10483        <Self as Vbroadcasti32x2Emitter<A, B>>::vbroadcasti32x2(self, op0, op1);
10484    }
10485    /// `VBROADCASTI32X2_MASK`.
10486    ///
10487    /// Supported operand variants:
10488    ///
10489    /// ```text
10490    /// +---+----------+
10491    /// | # | Operands |
10492    /// +---+----------+
10493    /// | 1 | Xmm, Mem |
10494    /// | 2 | Xmm, Xmm |
10495    /// | 3 | Ymm, Mem |
10496    /// | 4 | Ymm, Xmm |
10497    /// | 5 | Zmm, Mem |
10498    /// | 6 | Zmm, Xmm |
10499    /// +---+----------+
10500    /// ```
10501    #[inline]
10502    pub fn vbroadcasti32x2_mask<A, B>(&mut self, op0: A, op1: B)
10503    where
10504        Assembler<'a>: Vbroadcasti32x2MaskEmitter<A, B>,
10505    {
10506        <Self as Vbroadcasti32x2MaskEmitter<A, B>>::vbroadcasti32x2_mask(self, op0, op1);
10507    }
10508    /// `VBROADCASTI32X2_MASKZ`.
10509    ///
10510    /// Supported operand variants:
10511    ///
10512    /// ```text
10513    /// +---+----------+
10514    /// | # | Operands |
10515    /// +---+----------+
10516    /// | 1 | Xmm, Mem |
10517    /// | 2 | Xmm, Xmm |
10518    /// | 3 | Ymm, Mem |
10519    /// | 4 | Ymm, Xmm |
10520    /// | 5 | Zmm, Mem |
10521    /// | 6 | Zmm, Xmm |
10522    /// +---+----------+
10523    /// ```
10524    #[inline]
10525    pub fn vbroadcasti32x2_maskz<A, B>(&mut self, op0: A, op1: B)
10526    where
10527        Assembler<'a>: Vbroadcasti32x2MaskzEmitter<A, B>,
10528    {
10529        <Self as Vbroadcasti32x2MaskzEmitter<A, B>>::vbroadcasti32x2_maskz(self, op0, op1);
10530    }
10531    /// `VBROADCASTI32X4`.
10532    ///
10533    /// Supported operand variants:
10534    ///
10535    /// ```text
10536    /// +---+----------+
10537    /// | # | Operands |
10538    /// +---+----------+
10539    /// | 1 | Ymm, Mem |
10540    /// | 2 | Zmm, Mem |
10541    /// +---+----------+
10542    /// ```
10543    #[inline]
10544    pub fn vbroadcasti32x4<A, B>(&mut self, op0: A, op1: B)
10545    where
10546        Assembler<'a>: Vbroadcasti32x4Emitter<A, B>,
10547    {
10548        <Self as Vbroadcasti32x4Emitter<A, B>>::vbroadcasti32x4(self, op0, op1);
10549    }
10550    /// `VBROADCASTI32X4_MASK`.
10551    ///
10552    /// Supported operand variants:
10553    ///
10554    /// ```text
10555    /// +---+----------+
10556    /// | # | Operands |
10557    /// +---+----------+
10558    /// | 1 | Ymm, Mem |
10559    /// | 2 | Zmm, Mem |
10560    /// +---+----------+
10561    /// ```
10562    #[inline]
10563    pub fn vbroadcasti32x4_mask<A, B>(&mut self, op0: A, op1: B)
10564    where
10565        Assembler<'a>: Vbroadcasti32x4MaskEmitter<A, B>,
10566    {
10567        <Self as Vbroadcasti32x4MaskEmitter<A, B>>::vbroadcasti32x4_mask(self, op0, op1);
10568    }
10569    /// `VBROADCASTI32X4_MASKZ`.
10570    ///
10571    /// Supported operand variants:
10572    ///
10573    /// ```text
10574    /// +---+----------+
10575    /// | # | Operands |
10576    /// +---+----------+
10577    /// | 1 | Ymm, Mem |
10578    /// | 2 | Zmm, Mem |
10579    /// +---+----------+
10580    /// ```
10581    #[inline]
10582    pub fn vbroadcasti32x4_maskz<A, B>(&mut self, op0: A, op1: B)
10583    where
10584        Assembler<'a>: Vbroadcasti32x4MaskzEmitter<A, B>,
10585    {
10586        <Self as Vbroadcasti32x4MaskzEmitter<A, B>>::vbroadcasti32x4_maskz(self, op0, op1);
10587    }
10588    /// `VBROADCASTI32X8`.
10589    ///
10590    /// Supported operand variants:
10591    ///
10592    /// ```text
10593    /// +---+----------+
10594    /// | # | Operands |
10595    /// +---+----------+
10596    /// | 1 | Zmm, Mem |
10597    /// +---+----------+
10598    /// ```
10599    #[inline]
10600    pub fn vbroadcasti32x8<A, B>(&mut self, op0: A, op1: B)
10601    where
10602        Assembler<'a>: Vbroadcasti32x8Emitter<A, B>,
10603    {
10604        <Self as Vbroadcasti32x8Emitter<A, B>>::vbroadcasti32x8(self, op0, op1);
10605    }
10606    /// `VBROADCASTI32X8_MASK`.
10607    ///
10608    /// Supported operand variants:
10609    ///
10610    /// ```text
10611    /// +---+----------+
10612    /// | # | Operands |
10613    /// +---+----------+
10614    /// | 1 | Zmm, Mem |
10615    /// +---+----------+
10616    /// ```
10617    #[inline]
10618    pub fn vbroadcasti32x8_mask<A, B>(&mut self, op0: A, op1: B)
10619    where
10620        Assembler<'a>: Vbroadcasti32x8MaskEmitter<A, B>,
10621    {
10622        <Self as Vbroadcasti32x8MaskEmitter<A, B>>::vbroadcasti32x8_mask(self, op0, op1);
10623    }
10624    /// `VBROADCASTI32X8_MASKZ`.
10625    ///
10626    /// Supported operand variants:
10627    ///
10628    /// ```text
10629    /// +---+----------+
10630    /// | # | Operands |
10631    /// +---+----------+
10632    /// | 1 | Zmm, Mem |
10633    /// +---+----------+
10634    /// ```
10635    #[inline]
10636    pub fn vbroadcasti32x8_maskz<A, B>(&mut self, op0: A, op1: B)
10637    where
10638        Assembler<'a>: Vbroadcasti32x8MaskzEmitter<A, B>,
10639    {
10640        <Self as Vbroadcasti32x8MaskzEmitter<A, B>>::vbroadcasti32x8_maskz(self, op0, op1);
10641    }
10642    /// `VBROADCASTI64X2`.
10643    ///
10644    /// Supported operand variants:
10645    ///
10646    /// ```text
10647    /// +---+----------+
10648    /// | # | Operands |
10649    /// +---+----------+
10650    /// | 1 | Ymm, Mem |
10651    /// | 2 | Zmm, Mem |
10652    /// +---+----------+
10653    /// ```
10654    #[inline]
10655    pub fn vbroadcasti64x2<A, B>(&mut self, op0: A, op1: B)
10656    where
10657        Assembler<'a>: Vbroadcasti64x2Emitter<A, B>,
10658    {
10659        <Self as Vbroadcasti64x2Emitter<A, B>>::vbroadcasti64x2(self, op0, op1);
10660    }
10661    /// `VBROADCASTI64X2_MASK`.
10662    ///
10663    /// Supported operand variants:
10664    ///
10665    /// ```text
10666    /// +---+----------+
10667    /// | # | Operands |
10668    /// +---+----------+
10669    /// | 1 | Ymm, Mem |
10670    /// | 2 | Zmm, Mem |
10671    /// +---+----------+
10672    /// ```
10673    #[inline]
10674    pub fn vbroadcasti64x2_mask<A, B>(&mut self, op0: A, op1: B)
10675    where
10676        Assembler<'a>: Vbroadcasti64x2MaskEmitter<A, B>,
10677    {
10678        <Self as Vbroadcasti64x2MaskEmitter<A, B>>::vbroadcasti64x2_mask(self, op0, op1);
10679    }
10680    /// `VBROADCASTI64X2_MASKZ`.
10681    ///
10682    /// Supported operand variants:
10683    ///
10684    /// ```text
10685    /// +---+----------+
10686    /// | # | Operands |
10687    /// +---+----------+
10688    /// | 1 | Ymm, Mem |
10689    /// | 2 | Zmm, Mem |
10690    /// +---+----------+
10691    /// ```
10692    #[inline]
10693    pub fn vbroadcasti64x2_maskz<A, B>(&mut self, op0: A, op1: B)
10694    where
10695        Assembler<'a>: Vbroadcasti64x2MaskzEmitter<A, B>,
10696    {
10697        <Self as Vbroadcasti64x2MaskzEmitter<A, B>>::vbroadcasti64x2_maskz(self, op0, op1);
10698    }
10699    /// `VCVTPD2QQ`.
10700    ///
10701    /// Supported operand variants:
10702    ///
10703    /// ```text
10704    /// +---+----------+
10705    /// | # | Operands |
10706    /// +---+----------+
10707    /// | 1 | Xmm, Mem |
10708    /// | 2 | Xmm, Xmm |
10709    /// | 3 | Ymm, Mem |
10710    /// | 4 | Ymm, Ymm |
10711    /// | 5 | Zmm, Mem |
10712    /// | 6 | Zmm, Zmm |
10713    /// +---+----------+
10714    /// ```
10715    #[inline]
10716    pub fn vcvtpd2qq<A, B>(&mut self, op0: A, op1: B)
10717    where
10718        Assembler<'a>: Vcvtpd2qqEmitter<A, B>,
10719    {
10720        <Self as Vcvtpd2qqEmitter<A, B>>::vcvtpd2qq(self, op0, op1);
10721    }
10722    /// `VCVTPD2QQ_ER`.
10723    ///
10724    /// Supported operand variants:
10725    ///
10726    /// ```text
10727    /// +---+----------+
10728    /// | # | Operands |
10729    /// +---+----------+
10730    /// | 1 | Zmm, Zmm |
10731    /// +---+----------+
10732    /// ```
10733    #[inline]
10734    pub fn vcvtpd2qq_er<A, B>(&mut self, op0: A, op1: B)
10735    where
10736        Assembler<'a>: Vcvtpd2qqErEmitter<A, B>,
10737    {
10738        <Self as Vcvtpd2qqErEmitter<A, B>>::vcvtpd2qq_er(self, op0, op1);
10739    }
10740    /// `VCVTPD2QQ_MASK`.
10741    ///
10742    /// Supported operand variants:
10743    ///
10744    /// ```text
10745    /// +---+----------+
10746    /// | # | Operands |
10747    /// +---+----------+
10748    /// | 1 | Xmm, Mem |
10749    /// | 2 | Xmm, Xmm |
10750    /// | 3 | Ymm, Mem |
10751    /// | 4 | Ymm, Ymm |
10752    /// | 5 | Zmm, Mem |
10753    /// | 6 | Zmm, Zmm |
10754    /// +---+----------+
10755    /// ```
10756    #[inline]
10757    pub fn vcvtpd2qq_mask<A, B>(&mut self, op0: A, op1: B)
10758    where
10759        Assembler<'a>: Vcvtpd2qqMaskEmitter<A, B>,
10760    {
10761        <Self as Vcvtpd2qqMaskEmitter<A, B>>::vcvtpd2qq_mask(self, op0, op1);
10762    }
10763    /// `VCVTPD2QQ_MASK_ER`.
10764    ///
10765    /// Supported operand variants:
10766    ///
10767    /// ```text
10768    /// +---+----------+
10769    /// | # | Operands |
10770    /// +---+----------+
10771    /// | 1 | Zmm, Zmm |
10772    /// +---+----------+
10773    /// ```
10774    #[inline]
10775    pub fn vcvtpd2qq_mask_er<A, B>(&mut self, op0: A, op1: B)
10776    where
10777        Assembler<'a>: Vcvtpd2qqMaskErEmitter<A, B>,
10778    {
10779        <Self as Vcvtpd2qqMaskErEmitter<A, B>>::vcvtpd2qq_mask_er(self, op0, op1);
10780    }
10781    /// `VCVTPD2QQ_MASKZ`.
10782    ///
10783    /// Supported operand variants:
10784    ///
10785    /// ```text
10786    /// +---+----------+
10787    /// | # | Operands |
10788    /// +---+----------+
10789    /// | 1 | Xmm, Mem |
10790    /// | 2 | Xmm, Xmm |
10791    /// | 3 | Ymm, Mem |
10792    /// | 4 | Ymm, Ymm |
10793    /// | 5 | Zmm, Mem |
10794    /// | 6 | Zmm, Zmm |
10795    /// +---+----------+
10796    /// ```
10797    #[inline]
10798    pub fn vcvtpd2qq_maskz<A, B>(&mut self, op0: A, op1: B)
10799    where
10800        Assembler<'a>: Vcvtpd2qqMaskzEmitter<A, B>,
10801    {
10802        <Self as Vcvtpd2qqMaskzEmitter<A, B>>::vcvtpd2qq_maskz(self, op0, op1);
10803    }
10804    /// `VCVTPD2QQ_MASKZ_ER`.
10805    ///
10806    /// Supported operand variants:
10807    ///
10808    /// ```text
10809    /// +---+----------+
10810    /// | # | Operands |
10811    /// +---+----------+
10812    /// | 1 | Zmm, Zmm |
10813    /// +---+----------+
10814    /// ```
10815    #[inline]
10816    pub fn vcvtpd2qq_maskz_er<A, B>(&mut self, op0: A, op1: B)
10817    where
10818        Assembler<'a>: Vcvtpd2qqMaskzErEmitter<A, B>,
10819    {
10820        <Self as Vcvtpd2qqMaskzErEmitter<A, B>>::vcvtpd2qq_maskz_er(self, op0, op1);
10821    }
10822    /// `VCVTPS2QQ`.
10823    ///
10824    /// Supported operand variants:
10825    ///
10826    /// ```text
10827    /// +---+----------+
10828    /// | # | Operands |
10829    /// +---+----------+
10830    /// | 1 | Xmm, Mem |
10831    /// | 2 | Xmm, Xmm |
10832    /// | 3 | Ymm, Mem |
10833    /// | 4 | Ymm, Xmm |
10834    /// | 5 | Zmm, Mem |
10835    /// | 6 | Zmm, Ymm |
10836    /// +---+----------+
10837    /// ```
10838    #[inline]
10839    pub fn vcvtps2qq<A, B>(&mut self, op0: A, op1: B)
10840    where
10841        Assembler<'a>: Vcvtps2qqEmitter<A, B>,
10842    {
10843        <Self as Vcvtps2qqEmitter<A, B>>::vcvtps2qq(self, op0, op1);
10844    }
10845    /// `VCVTPS2QQ_ER`.
10846    ///
10847    /// Supported operand variants:
10848    ///
10849    /// ```text
10850    /// +---+----------+
10851    /// | # | Operands |
10852    /// +---+----------+
10853    /// | 1 | Zmm, Ymm |
10854    /// +---+----------+
10855    /// ```
10856    #[inline]
10857    pub fn vcvtps2qq_er<A, B>(&mut self, op0: A, op1: B)
10858    where
10859        Assembler<'a>: Vcvtps2qqErEmitter<A, B>,
10860    {
10861        <Self as Vcvtps2qqErEmitter<A, B>>::vcvtps2qq_er(self, op0, op1);
10862    }
10863    /// `VCVTPS2QQ_MASK`.
10864    ///
10865    /// Supported operand variants:
10866    ///
10867    /// ```text
10868    /// +---+----------+
10869    /// | # | Operands |
10870    /// +---+----------+
10871    /// | 1 | Xmm, Mem |
10872    /// | 2 | Xmm, Xmm |
10873    /// | 3 | Ymm, Mem |
10874    /// | 4 | Ymm, Xmm |
10875    /// | 5 | Zmm, Mem |
10876    /// | 6 | Zmm, Ymm |
10877    /// +---+----------+
10878    /// ```
10879    #[inline]
10880    pub fn vcvtps2qq_mask<A, B>(&mut self, op0: A, op1: B)
10881    where
10882        Assembler<'a>: Vcvtps2qqMaskEmitter<A, B>,
10883    {
10884        <Self as Vcvtps2qqMaskEmitter<A, B>>::vcvtps2qq_mask(self, op0, op1);
10885    }
10886    /// `VCVTPS2QQ_MASK_ER`.
10887    ///
10888    /// Supported operand variants:
10889    ///
10890    /// ```text
10891    /// +---+----------+
10892    /// | # | Operands |
10893    /// +---+----------+
10894    /// | 1 | Zmm, Ymm |
10895    /// +---+----------+
10896    /// ```
10897    #[inline]
10898    pub fn vcvtps2qq_mask_er<A, B>(&mut self, op0: A, op1: B)
10899    where
10900        Assembler<'a>: Vcvtps2qqMaskErEmitter<A, B>,
10901    {
10902        <Self as Vcvtps2qqMaskErEmitter<A, B>>::vcvtps2qq_mask_er(self, op0, op1);
10903    }
10904    /// `VCVTPS2QQ_MASKZ`.
10905    ///
10906    /// Supported operand variants:
10907    ///
10908    /// ```text
10909    /// +---+----------+
10910    /// | # | Operands |
10911    /// +---+----------+
10912    /// | 1 | Xmm, Mem |
10913    /// | 2 | Xmm, Xmm |
10914    /// | 3 | Ymm, Mem |
10915    /// | 4 | Ymm, Xmm |
10916    /// | 5 | Zmm, Mem |
10917    /// | 6 | Zmm, Ymm |
10918    /// +---+----------+
10919    /// ```
10920    #[inline]
10921    pub fn vcvtps2qq_maskz<A, B>(&mut self, op0: A, op1: B)
10922    where
10923        Assembler<'a>: Vcvtps2qqMaskzEmitter<A, B>,
10924    {
10925        <Self as Vcvtps2qqMaskzEmitter<A, B>>::vcvtps2qq_maskz(self, op0, op1);
10926    }
10927    /// `VCVTPS2QQ_MASKZ_ER`.
10928    ///
10929    /// Supported operand variants:
10930    ///
10931    /// ```text
10932    /// +---+----------+
10933    /// | # | Operands |
10934    /// +---+----------+
10935    /// | 1 | Zmm, Ymm |
10936    /// +---+----------+
10937    /// ```
10938    #[inline]
10939    pub fn vcvtps2qq_maskz_er<A, B>(&mut self, op0: A, op1: B)
10940    where
10941        Assembler<'a>: Vcvtps2qqMaskzErEmitter<A, B>,
10942    {
10943        <Self as Vcvtps2qqMaskzErEmitter<A, B>>::vcvtps2qq_maskz_er(self, op0, op1);
10944    }
10945    /// `VCVTQQ2PD`.
10946    ///
10947    /// Supported operand variants:
10948    ///
10949    /// ```text
10950    /// +---+----------+
10951    /// | # | Operands |
10952    /// +---+----------+
10953    /// | 1 | Xmm, Mem |
10954    /// | 2 | Xmm, Xmm |
10955    /// | 3 | Ymm, Mem |
10956    /// | 4 | Ymm, Ymm |
10957    /// | 5 | Zmm, Mem |
10958    /// | 6 | Zmm, Zmm |
10959    /// +---+----------+
10960    /// ```
10961    #[inline]
10962    pub fn vcvtqq2pd<A, B>(&mut self, op0: A, op1: B)
10963    where
10964        Assembler<'a>: Vcvtqq2pdEmitter<A, B>,
10965    {
10966        <Self as Vcvtqq2pdEmitter<A, B>>::vcvtqq2pd(self, op0, op1);
10967    }
10968    /// `VCVTQQ2PD_ER`.
10969    ///
10970    /// Supported operand variants:
10971    ///
10972    /// ```text
10973    /// +---+----------+
10974    /// | # | Operands |
10975    /// +---+----------+
10976    /// | 1 | Zmm, Zmm |
10977    /// +---+----------+
10978    /// ```
10979    #[inline]
10980    pub fn vcvtqq2pd_er<A, B>(&mut self, op0: A, op1: B)
10981    where
10982        Assembler<'a>: Vcvtqq2pdErEmitter<A, B>,
10983    {
10984        <Self as Vcvtqq2pdErEmitter<A, B>>::vcvtqq2pd_er(self, op0, op1);
10985    }
10986    /// `VCVTQQ2PD_MASK`.
10987    ///
10988    /// Supported operand variants:
10989    ///
10990    /// ```text
10991    /// +---+----------+
10992    /// | # | Operands |
10993    /// +---+----------+
10994    /// | 1 | Xmm, Mem |
10995    /// | 2 | Xmm, Xmm |
10996    /// | 3 | Ymm, Mem |
10997    /// | 4 | Ymm, Ymm |
10998    /// | 5 | Zmm, Mem |
10999    /// | 6 | Zmm, Zmm |
11000    /// +---+----------+
11001    /// ```
11002    #[inline]
11003    pub fn vcvtqq2pd_mask<A, B>(&mut self, op0: A, op1: B)
11004    where
11005        Assembler<'a>: Vcvtqq2pdMaskEmitter<A, B>,
11006    {
11007        <Self as Vcvtqq2pdMaskEmitter<A, B>>::vcvtqq2pd_mask(self, op0, op1);
11008    }
11009    /// `VCVTQQ2PD_MASK_ER`.
11010    ///
11011    /// Supported operand variants:
11012    ///
11013    /// ```text
11014    /// +---+----------+
11015    /// | # | Operands |
11016    /// +---+----------+
11017    /// | 1 | Zmm, Zmm |
11018    /// +---+----------+
11019    /// ```
11020    #[inline]
11021    pub fn vcvtqq2pd_mask_er<A, B>(&mut self, op0: A, op1: B)
11022    where
11023        Assembler<'a>: Vcvtqq2pdMaskErEmitter<A, B>,
11024    {
11025        <Self as Vcvtqq2pdMaskErEmitter<A, B>>::vcvtqq2pd_mask_er(self, op0, op1);
11026    }
11027    /// `VCVTQQ2PD_MASKZ`.
11028    ///
11029    /// Supported operand variants:
11030    ///
11031    /// ```text
11032    /// +---+----------+
11033    /// | # | Operands |
11034    /// +---+----------+
11035    /// | 1 | Xmm, Mem |
11036    /// | 2 | Xmm, Xmm |
11037    /// | 3 | Ymm, Mem |
11038    /// | 4 | Ymm, Ymm |
11039    /// | 5 | Zmm, Mem |
11040    /// | 6 | Zmm, Zmm |
11041    /// +---+----------+
11042    /// ```
11043    #[inline]
11044    pub fn vcvtqq2pd_maskz<A, B>(&mut self, op0: A, op1: B)
11045    where
11046        Assembler<'a>: Vcvtqq2pdMaskzEmitter<A, B>,
11047    {
11048        <Self as Vcvtqq2pdMaskzEmitter<A, B>>::vcvtqq2pd_maskz(self, op0, op1);
11049    }
11050    /// `VCVTQQ2PD_MASKZ_ER`.
11051    ///
11052    /// Supported operand variants:
11053    ///
11054    /// ```text
11055    /// +---+----------+
11056    /// | # | Operands |
11057    /// +---+----------+
11058    /// | 1 | Zmm, Zmm |
11059    /// +---+----------+
11060    /// ```
11061    #[inline]
11062    pub fn vcvtqq2pd_maskz_er<A, B>(&mut self, op0: A, op1: B)
11063    where
11064        Assembler<'a>: Vcvtqq2pdMaskzErEmitter<A, B>,
11065    {
11066        <Self as Vcvtqq2pdMaskzErEmitter<A, B>>::vcvtqq2pd_maskz_er(self, op0, op1);
11067    }
11068    /// `VCVTQQ2PS`.
11069    ///
11070    /// Supported operand variants:
11071    ///
11072    /// ```text
11073    /// +---+----------+
11074    /// | # | Operands |
11075    /// +---+----------+
11076    /// | 1 | Xmm, Mem |
11077    /// | 2 | Xmm, Xmm |
11078    /// | 3 | Xmm, Ymm |
11079    /// | 4 | Ymm, Mem |
11080    /// | 5 | Ymm, Zmm |
11081    /// +---+----------+
11082    /// ```
11083    #[inline]
11084    pub fn vcvtqq2ps<A, B>(&mut self, op0: A, op1: B)
11085    where
11086        Assembler<'a>: Vcvtqq2psEmitter<A, B>,
11087    {
11088        <Self as Vcvtqq2psEmitter<A, B>>::vcvtqq2ps(self, op0, op1);
11089    }
11090    /// `VCVTQQ2PS_ER`.
11091    ///
11092    /// Supported operand variants:
11093    ///
11094    /// ```text
11095    /// +---+----------+
11096    /// | # | Operands |
11097    /// +---+----------+
11098    /// | 1 | Ymm, Zmm |
11099    /// +---+----------+
11100    /// ```
11101    #[inline]
11102    pub fn vcvtqq2ps_er<A, B>(&mut self, op0: A, op1: B)
11103    where
11104        Assembler<'a>: Vcvtqq2psErEmitter<A, B>,
11105    {
11106        <Self as Vcvtqq2psErEmitter<A, B>>::vcvtqq2ps_er(self, op0, op1);
11107    }
11108    /// `VCVTQQ2PS_MASK`.
11109    ///
11110    /// Supported operand variants:
11111    ///
11112    /// ```text
11113    /// +---+----------+
11114    /// | # | Operands |
11115    /// +---+----------+
11116    /// | 1 | Xmm, Mem |
11117    /// | 2 | Xmm, Xmm |
11118    /// | 3 | Xmm, Ymm |
11119    /// | 4 | Ymm, Mem |
11120    /// | 5 | Ymm, Zmm |
11121    /// +---+----------+
11122    /// ```
11123    #[inline]
11124    pub fn vcvtqq2ps_mask<A, B>(&mut self, op0: A, op1: B)
11125    where
11126        Assembler<'a>: Vcvtqq2psMaskEmitter<A, B>,
11127    {
11128        <Self as Vcvtqq2psMaskEmitter<A, B>>::vcvtqq2ps_mask(self, op0, op1);
11129    }
11130    /// `VCVTQQ2PS_MASK_ER`.
11131    ///
11132    /// Supported operand variants:
11133    ///
11134    /// ```text
11135    /// +---+----------+
11136    /// | # | Operands |
11137    /// +---+----------+
11138    /// | 1 | Ymm, Zmm |
11139    /// +---+----------+
11140    /// ```
11141    #[inline]
11142    pub fn vcvtqq2ps_mask_er<A, B>(&mut self, op0: A, op1: B)
11143    where
11144        Assembler<'a>: Vcvtqq2psMaskErEmitter<A, B>,
11145    {
11146        <Self as Vcvtqq2psMaskErEmitter<A, B>>::vcvtqq2ps_mask_er(self, op0, op1);
11147    }
11148    /// `VCVTQQ2PS_MASKZ`.
11149    ///
11150    /// Supported operand variants:
11151    ///
11152    /// ```text
11153    /// +---+----------+
11154    /// | # | Operands |
11155    /// +---+----------+
11156    /// | 1 | Xmm, Mem |
11157    /// | 2 | Xmm, Xmm |
11158    /// | 3 | Xmm, Ymm |
11159    /// | 4 | Ymm, Mem |
11160    /// | 5 | Ymm, Zmm |
11161    /// +---+----------+
11162    /// ```
11163    #[inline]
11164    pub fn vcvtqq2ps_maskz<A, B>(&mut self, op0: A, op1: B)
11165    where
11166        Assembler<'a>: Vcvtqq2psMaskzEmitter<A, B>,
11167    {
11168        <Self as Vcvtqq2psMaskzEmitter<A, B>>::vcvtqq2ps_maskz(self, op0, op1);
11169    }
11170    /// `VCVTQQ2PS_MASKZ_ER`.
11171    ///
11172    /// Supported operand variants:
11173    ///
11174    /// ```text
11175    /// +---+----------+
11176    /// | # | Operands |
11177    /// +---+----------+
11178    /// | 1 | Ymm, Zmm |
11179    /// +---+----------+
11180    /// ```
11181    #[inline]
11182    pub fn vcvtqq2ps_maskz_er<A, B>(&mut self, op0: A, op1: B)
11183    where
11184        Assembler<'a>: Vcvtqq2psMaskzErEmitter<A, B>,
11185    {
11186        <Self as Vcvtqq2psMaskzErEmitter<A, B>>::vcvtqq2ps_maskz_er(self, op0, op1);
11187    }
11188    /// `VCVTTPD2QQ`.
11189    ///
11190    /// Supported operand variants:
11191    ///
11192    /// ```text
11193    /// +---+----------+
11194    /// | # | Operands |
11195    /// +---+----------+
11196    /// | 1 | Xmm, Mem |
11197    /// | 2 | Xmm, Xmm |
11198    /// | 3 | Ymm, Mem |
11199    /// | 4 | Ymm, Ymm |
11200    /// | 5 | Zmm, Mem |
11201    /// | 6 | Zmm, Zmm |
11202    /// +---+----------+
11203    /// ```
11204    #[inline]
11205    pub fn vcvttpd2qq<A, B>(&mut self, op0: A, op1: B)
11206    where
11207        Assembler<'a>: Vcvttpd2qqEmitter<A, B>,
11208    {
11209        <Self as Vcvttpd2qqEmitter<A, B>>::vcvttpd2qq(self, op0, op1);
11210    }
11211    /// `VCVTTPD2QQ_MASK`.
11212    ///
11213    /// Supported operand variants:
11214    ///
11215    /// ```text
11216    /// +---+----------+
11217    /// | # | Operands |
11218    /// +---+----------+
11219    /// | 1 | Xmm, Mem |
11220    /// | 2 | Xmm, Xmm |
11221    /// | 3 | Ymm, Mem |
11222    /// | 4 | Ymm, Ymm |
11223    /// | 5 | Zmm, Mem |
11224    /// | 6 | Zmm, Zmm |
11225    /// +---+----------+
11226    /// ```
11227    #[inline]
11228    pub fn vcvttpd2qq_mask<A, B>(&mut self, op0: A, op1: B)
11229    where
11230        Assembler<'a>: Vcvttpd2qqMaskEmitter<A, B>,
11231    {
11232        <Self as Vcvttpd2qqMaskEmitter<A, B>>::vcvttpd2qq_mask(self, op0, op1);
11233    }
11234    /// `VCVTTPD2QQ_MASK_SAE`.
11235    ///
11236    /// Supported operand variants:
11237    ///
11238    /// ```text
11239    /// +---+----------+
11240    /// | # | Operands |
11241    /// +---+----------+
11242    /// | 1 | Zmm, Zmm |
11243    /// +---+----------+
11244    /// ```
11245    #[inline]
11246    pub fn vcvttpd2qq_mask_sae<A, B>(&mut self, op0: A, op1: B)
11247    where
11248        Assembler<'a>: Vcvttpd2qqMaskSaeEmitter<A, B>,
11249    {
11250        <Self as Vcvttpd2qqMaskSaeEmitter<A, B>>::vcvttpd2qq_mask_sae(self, op0, op1);
11251    }
11252    /// `VCVTTPD2QQ_MASKZ`.
11253    ///
11254    /// Supported operand variants:
11255    ///
11256    /// ```text
11257    /// +---+----------+
11258    /// | # | Operands |
11259    /// +---+----------+
11260    /// | 1 | Xmm, Mem |
11261    /// | 2 | Xmm, Xmm |
11262    /// | 3 | Ymm, Mem |
11263    /// | 4 | Ymm, Ymm |
11264    /// | 5 | Zmm, Mem |
11265    /// | 6 | Zmm, Zmm |
11266    /// +---+----------+
11267    /// ```
11268    #[inline]
11269    pub fn vcvttpd2qq_maskz<A, B>(&mut self, op0: A, op1: B)
11270    where
11271        Assembler<'a>: Vcvttpd2qqMaskzEmitter<A, B>,
11272    {
11273        <Self as Vcvttpd2qqMaskzEmitter<A, B>>::vcvttpd2qq_maskz(self, op0, op1);
11274    }
11275    /// `VCVTTPD2QQ_MASKZ_SAE`.
11276    ///
11277    /// Supported operand variants:
11278    ///
11279    /// ```text
11280    /// +---+----------+
11281    /// | # | Operands |
11282    /// +---+----------+
11283    /// | 1 | Zmm, Zmm |
11284    /// +---+----------+
11285    /// ```
11286    #[inline]
11287    pub fn vcvttpd2qq_maskz_sae<A, B>(&mut self, op0: A, op1: B)
11288    where
11289        Assembler<'a>: Vcvttpd2qqMaskzSaeEmitter<A, B>,
11290    {
11291        <Self as Vcvttpd2qqMaskzSaeEmitter<A, B>>::vcvttpd2qq_maskz_sae(self, op0, op1);
11292    }
11293    /// `VCVTTPD2QQ_SAE`.
11294    ///
11295    /// Supported operand variants:
11296    ///
11297    /// ```text
11298    /// +---+----------+
11299    /// | # | Operands |
11300    /// +---+----------+
11301    /// | 1 | Zmm, Zmm |
11302    /// +---+----------+
11303    /// ```
11304    #[inline]
11305    pub fn vcvttpd2qq_sae<A, B>(&mut self, op0: A, op1: B)
11306    where
11307        Assembler<'a>: Vcvttpd2qqSaeEmitter<A, B>,
11308    {
11309        <Self as Vcvttpd2qqSaeEmitter<A, B>>::vcvttpd2qq_sae(self, op0, op1);
11310    }
11311    /// `VCVTTPS2QQ`.
11312    ///
11313    /// Supported operand variants:
11314    ///
11315    /// ```text
11316    /// +---+----------+
11317    /// | # | Operands |
11318    /// +---+----------+
11319    /// | 1 | Xmm, Mem |
11320    /// | 2 | Xmm, Xmm |
11321    /// | 3 | Ymm, Mem |
11322    /// | 4 | Ymm, Xmm |
11323    /// | 5 | Zmm, Mem |
11324    /// | 6 | Zmm, Ymm |
11325    /// +---+----------+
11326    /// ```
11327    #[inline]
11328    pub fn vcvttps2qq<A, B>(&mut self, op0: A, op1: B)
11329    where
11330        Assembler<'a>: Vcvttps2qqEmitter<A, B>,
11331    {
11332        <Self as Vcvttps2qqEmitter<A, B>>::vcvttps2qq(self, op0, op1);
11333    }
11334    /// `VCVTTPS2QQ_MASK`.
11335    ///
11336    /// Supported operand variants:
11337    ///
11338    /// ```text
11339    /// +---+----------+
11340    /// | # | Operands |
11341    /// +---+----------+
11342    /// | 1 | Xmm, Mem |
11343    /// | 2 | Xmm, Xmm |
11344    /// | 3 | Ymm, Mem |
11345    /// | 4 | Ymm, Xmm |
11346    /// | 5 | Zmm, Mem |
11347    /// | 6 | Zmm, Ymm |
11348    /// +---+----------+
11349    /// ```
11350    #[inline]
11351    pub fn vcvttps2qq_mask<A, B>(&mut self, op0: A, op1: B)
11352    where
11353        Assembler<'a>: Vcvttps2qqMaskEmitter<A, B>,
11354    {
11355        <Self as Vcvttps2qqMaskEmitter<A, B>>::vcvttps2qq_mask(self, op0, op1);
11356    }
11357    /// `VCVTTPS2QQ_MASK_SAE`.
11358    ///
11359    /// Supported operand variants:
11360    ///
11361    /// ```text
11362    /// +---+----------+
11363    /// | # | Operands |
11364    /// +---+----------+
11365    /// | 1 | Zmm, Ymm |
11366    /// +---+----------+
11367    /// ```
11368    #[inline]
11369    pub fn vcvttps2qq_mask_sae<A, B>(&mut self, op0: A, op1: B)
11370    where
11371        Assembler<'a>: Vcvttps2qqMaskSaeEmitter<A, B>,
11372    {
11373        <Self as Vcvttps2qqMaskSaeEmitter<A, B>>::vcvttps2qq_mask_sae(self, op0, op1);
11374    }
11375    /// `VCVTTPS2QQ_MASKZ`.
11376    ///
11377    /// Supported operand variants:
11378    ///
11379    /// ```text
11380    /// +---+----------+
11381    /// | # | Operands |
11382    /// +---+----------+
11383    /// | 1 | Xmm, Mem |
11384    /// | 2 | Xmm, Xmm |
11385    /// | 3 | Ymm, Mem |
11386    /// | 4 | Ymm, Xmm |
11387    /// | 5 | Zmm, Mem |
11388    /// | 6 | Zmm, Ymm |
11389    /// +---+----------+
11390    /// ```
11391    #[inline]
11392    pub fn vcvttps2qq_maskz<A, B>(&mut self, op0: A, op1: B)
11393    where
11394        Assembler<'a>: Vcvttps2qqMaskzEmitter<A, B>,
11395    {
11396        <Self as Vcvttps2qqMaskzEmitter<A, B>>::vcvttps2qq_maskz(self, op0, op1);
11397    }
11398    /// `VCVTTPS2QQ_MASKZ_SAE`.
11399    ///
11400    /// Supported operand variants:
11401    ///
11402    /// ```text
11403    /// +---+----------+
11404    /// | # | Operands |
11405    /// +---+----------+
11406    /// | 1 | Zmm, Ymm |
11407    /// +---+----------+
11408    /// ```
11409    #[inline]
11410    pub fn vcvttps2qq_maskz_sae<A, B>(&mut self, op0: A, op1: B)
11411    where
11412        Assembler<'a>: Vcvttps2qqMaskzSaeEmitter<A, B>,
11413    {
11414        <Self as Vcvttps2qqMaskzSaeEmitter<A, B>>::vcvttps2qq_maskz_sae(self, op0, op1);
11415    }
11416    /// `VCVTTPS2QQ_SAE`.
11417    ///
11418    /// Supported operand variants:
11419    ///
11420    /// ```text
11421    /// +---+----------+
11422    /// | # | Operands |
11423    /// +---+----------+
11424    /// | 1 | Zmm, Ymm |
11425    /// +---+----------+
11426    /// ```
11427    #[inline]
11428    pub fn vcvttps2qq_sae<A, B>(&mut self, op0: A, op1: B)
11429    where
11430        Assembler<'a>: Vcvttps2qqSaeEmitter<A, B>,
11431    {
11432        <Self as Vcvttps2qqSaeEmitter<A, B>>::vcvttps2qq_sae(self, op0, op1);
11433    }
11434    /// `VFPCLASSPD`.
11435    ///
11436    /// Supported operand variants:
11437    ///
11438    /// ```text
11439    /// +---+----------------+
11440    /// | # | Operands       |
11441    /// +---+----------------+
11442    /// | 1 | KReg, Mem, Imm |
11443    /// | 2 | KReg, Xmm, Imm |
11444    /// | 3 | KReg, Ymm, Imm |
11445    /// | 4 | KReg, Zmm, Imm |
11446    /// +---+----------------+
11447    /// ```
11448    #[inline]
11449    pub fn vfpclasspd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11450    where
11451        Assembler<'a>: VfpclasspdEmitter<A, B, C>,
11452    {
11453        <Self as VfpclasspdEmitter<A, B, C>>::vfpclasspd(self, op0, op1, op2);
11454    }
11455    /// `VFPCLASSPD_MASK`.
11456    ///
11457    /// Supported operand variants:
11458    ///
11459    /// ```text
11460    /// +---+----------------+
11461    /// | # | Operands       |
11462    /// +---+----------------+
11463    /// | 1 | KReg, Mem, Imm |
11464    /// | 2 | KReg, Xmm, Imm |
11465    /// | 3 | KReg, Ymm, Imm |
11466    /// | 4 | KReg, Zmm, Imm |
11467    /// +---+----------------+
11468    /// ```
11469    #[inline]
11470    pub fn vfpclasspd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11471    where
11472        Assembler<'a>: VfpclasspdMaskEmitter<A, B, C>,
11473    {
11474        <Self as VfpclasspdMaskEmitter<A, B, C>>::vfpclasspd_mask(self, op0, op1, op2);
11475    }
11476    /// `VFPCLASSPS`.
11477    ///
11478    /// Supported operand variants:
11479    ///
11480    /// ```text
11481    /// +---+----------------+
11482    /// | # | Operands       |
11483    /// +---+----------------+
11484    /// | 1 | KReg, Mem, Imm |
11485    /// | 2 | KReg, Xmm, Imm |
11486    /// | 3 | KReg, Ymm, Imm |
11487    /// | 4 | KReg, Zmm, Imm |
11488    /// +---+----------------+
11489    /// ```
11490    #[inline]
11491    pub fn vfpclassps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11492    where
11493        Assembler<'a>: VfpclasspsEmitter<A, B, C>,
11494    {
11495        <Self as VfpclasspsEmitter<A, B, C>>::vfpclassps(self, op0, op1, op2);
11496    }
11497    /// `VFPCLASSPS_MASK`.
11498    ///
11499    /// Supported operand variants:
11500    ///
11501    /// ```text
11502    /// +---+----------------+
11503    /// | # | Operands       |
11504    /// +---+----------------+
11505    /// | 1 | KReg, Mem, Imm |
11506    /// | 2 | KReg, Xmm, Imm |
11507    /// | 3 | KReg, Ymm, Imm |
11508    /// | 4 | KReg, Zmm, Imm |
11509    /// +---+----------------+
11510    /// ```
11511    #[inline]
11512    pub fn vfpclassps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11513    where
11514        Assembler<'a>: VfpclasspsMaskEmitter<A, B, C>,
11515    {
11516        <Self as VfpclasspsMaskEmitter<A, B, C>>::vfpclassps_mask(self, op0, op1, op2);
11517    }
11518    /// `VFPCLASSSD`.
11519    ///
11520    /// Supported operand variants:
11521    ///
11522    /// ```text
11523    /// +---+----------------+
11524    /// | # | Operands       |
11525    /// +---+----------------+
11526    /// | 1 | KReg, Mem, Imm |
11527    /// | 2 | KReg, Xmm, Imm |
11528    /// +---+----------------+
11529    /// ```
11530    #[inline]
11531    pub fn vfpclasssd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11532    where
11533        Assembler<'a>: VfpclasssdEmitter<A, B, C>,
11534    {
11535        <Self as VfpclasssdEmitter<A, B, C>>::vfpclasssd(self, op0, op1, op2);
11536    }
11537    /// `VFPCLASSSD_MASK`.
11538    ///
11539    /// Supported operand variants:
11540    ///
11541    /// ```text
11542    /// +---+----------------+
11543    /// | # | Operands       |
11544    /// +---+----------------+
11545    /// | 1 | KReg, Mem, Imm |
11546    /// | 2 | KReg, Xmm, Imm |
11547    /// +---+----------------+
11548    /// ```
11549    #[inline]
11550    pub fn vfpclasssd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11551    where
11552        Assembler<'a>: VfpclasssdMaskEmitter<A, B, C>,
11553    {
11554        <Self as VfpclasssdMaskEmitter<A, B, C>>::vfpclasssd_mask(self, op0, op1, op2);
11555    }
11556    /// `VFPCLASSSS`.
11557    ///
11558    /// Supported operand variants:
11559    ///
11560    /// ```text
11561    /// +---+----------------+
11562    /// | # | Operands       |
11563    /// +---+----------------+
11564    /// | 1 | KReg, Mem, Imm |
11565    /// | 2 | KReg, Xmm, Imm |
11566    /// +---+----------------+
11567    /// ```
11568    #[inline]
11569    pub fn vfpclassss<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11570    where
11571        Assembler<'a>: VfpclassssEmitter<A, B, C>,
11572    {
11573        <Self as VfpclassssEmitter<A, B, C>>::vfpclassss(self, op0, op1, op2);
11574    }
11575    /// `VFPCLASSSS_MASK`.
11576    ///
11577    /// Supported operand variants:
11578    ///
11579    /// ```text
11580    /// +---+----------------+
11581    /// | # | Operands       |
11582    /// +---+----------------+
11583    /// | 1 | KReg, Mem, Imm |
11584    /// | 2 | KReg, Xmm, Imm |
11585    /// +---+----------------+
11586    /// ```
11587    #[inline]
11588    pub fn vfpclassss_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11589    where
11590        Assembler<'a>: VfpclassssMaskEmitter<A, B, C>,
11591    {
11592        <Self as VfpclassssMaskEmitter<A, B, C>>::vfpclassss_mask(self, op0, op1, op2);
11593    }
11594    /// `VINSERTF32X8`.
11595    ///
11596    /// Supported operand variants:
11597    ///
11598    /// ```text
11599    /// +---+--------------------+
11600    /// | # | Operands           |
11601    /// +---+--------------------+
11602    /// | 1 | Zmm, Zmm, Mem, Imm |
11603    /// | 2 | Zmm, Zmm, Ymm, Imm |
11604    /// +---+--------------------+
11605    /// ```
11606    #[inline]
11607    pub fn vinsertf32x8<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11608    where
11609        Assembler<'a>: Vinsertf32x8Emitter<A, B, C, D>,
11610    {
11611        <Self as Vinsertf32x8Emitter<A, B, C, D>>::vinsertf32x8(self, op0, op1, op2, op3);
11612    }
11613    /// `VINSERTF32X8_MASK`.
11614    ///
11615    /// Supported operand variants:
11616    ///
11617    /// ```text
11618    /// +---+--------------------+
11619    /// | # | Operands           |
11620    /// +---+--------------------+
11621    /// | 1 | Zmm, Zmm, Mem, Imm |
11622    /// | 2 | Zmm, Zmm, Ymm, Imm |
11623    /// +---+--------------------+
11624    /// ```
11625    #[inline]
11626    pub fn vinsertf32x8_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11627    where
11628        Assembler<'a>: Vinsertf32x8MaskEmitter<A, B, C, D>,
11629    {
11630        <Self as Vinsertf32x8MaskEmitter<A, B, C, D>>::vinsertf32x8_mask(self, op0, op1, op2, op3);
11631    }
11632    /// `VINSERTF32X8_MASKZ`.
11633    ///
11634    /// Supported operand variants:
11635    ///
11636    /// ```text
11637    /// +---+--------------------+
11638    /// | # | Operands           |
11639    /// +---+--------------------+
11640    /// | 1 | Zmm, Zmm, Mem, Imm |
11641    /// | 2 | Zmm, Zmm, Ymm, Imm |
11642    /// +---+--------------------+
11643    /// ```
11644    #[inline]
11645    pub fn vinsertf32x8_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11646    where
11647        Assembler<'a>: Vinsertf32x8MaskzEmitter<A, B, C, D>,
11648    {
11649        <Self as Vinsertf32x8MaskzEmitter<A, B, C, D>>::vinsertf32x8_maskz(
11650            self, op0, op1, op2, op3,
11651        );
11652    }
11653    /// `VINSERTF64X2`.
11654    ///
11655    /// Supported operand variants:
11656    ///
11657    /// ```text
11658    /// +---+--------------------+
11659    /// | # | Operands           |
11660    /// +---+--------------------+
11661    /// | 1 | Ymm, Ymm, Mem, Imm |
11662    /// | 2 | Ymm, Ymm, Xmm, Imm |
11663    /// | 3 | Zmm, Zmm, Mem, Imm |
11664    /// | 4 | Zmm, Zmm, Xmm, Imm |
11665    /// +---+--------------------+
11666    /// ```
11667    #[inline]
11668    pub fn vinsertf64x2<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11669    where
11670        Assembler<'a>: Vinsertf64x2Emitter<A, B, C, D>,
11671    {
11672        <Self as Vinsertf64x2Emitter<A, B, C, D>>::vinsertf64x2(self, op0, op1, op2, op3);
11673    }
11674    /// `VINSERTF64X2_MASK`.
11675    ///
11676    /// Supported operand variants:
11677    ///
11678    /// ```text
11679    /// +---+--------------------+
11680    /// | # | Operands           |
11681    /// +---+--------------------+
11682    /// | 1 | Ymm, Ymm, Mem, Imm |
11683    /// | 2 | Ymm, Ymm, Xmm, Imm |
11684    /// | 3 | Zmm, Zmm, Mem, Imm |
11685    /// | 4 | Zmm, Zmm, Xmm, Imm |
11686    /// +---+--------------------+
11687    /// ```
11688    #[inline]
11689    pub fn vinsertf64x2_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11690    where
11691        Assembler<'a>: Vinsertf64x2MaskEmitter<A, B, C, D>,
11692    {
11693        <Self as Vinsertf64x2MaskEmitter<A, B, C, D>>::vinsertf64x2_mask(self, op0, op1, op2, op3);
11694    }
11695    /// `VINSERTF64X2_MASKZ`.
11696    ///
11697    /// Supported operand variants:
11698    ///
11699    /// ```text
11700    /// +---+--------------------+
11701    /// | # | Operands           |
11702    /// +---+--------------------+
11703    /// | 1 | Ymm, Ymm, Mem, Imm |
11704    /// | 2 | Ymm, Ymm, Xmm, Imm |
11705    /// | 3 | Zmm, Zmm, Mem, Imm |
11706    /// | 4 | Zmm, Zmm, Xmm, Imm |
11707    /// +---+--------------------+
11708    /// ```
11709    #[inline]
11710    pub fn vinsertf64x2_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11711    where
11712        Assembler<'a>: Vinsertf64x2MaskzEmitter<A, B, C, D>,
11713    {
11714        <Self as Vinsertf64x2MaskzEmitter<A, B, C, D>>::vinsertf64x2_maskz(
11715            self, op0, op1, op2, op3,
11716        );
11717    }
11718    /// `VINSERTI32X8`.
11719    ///
11720    /// Supported operand variants:
11721    ///
11722    /// ```text
11723    /// +---+--------------------+
11724    /// | # | Operands           |
11725    /// +---+--------------------+
11726    /// | 1 | Zmm, Zmm, Mem, Imm |
11727    /// | 2 | Zmm, Zmm, Ymm, Imm |
11728    /// +---+--------------------+
11729    /// ```
11730    #[inline]
11731    pub fn vinserti32x8<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11732    where
11733        Assembler<'a>: Vinserti32x8Emitter<A, B, C, D>,
11734    {
11735        <Self as Vinserti32x8Emitter<A, B, C, D>>::vinserti32x8(self, op0, op1, op2, op3);
11736    }
11737    /// `VINSERTI32X8_MASK`.
11738    ///
11739    /// Supported operand variants:
11740    ///
11741    /// ```text
11742    /// +---+--------------------+
11743    /// | # | Operands           |
11744    /// +---+--------------------+
11745    /// | 1 | Zmm, Zmm, Mem, Imm |
11746    /// | 2 | Zmm, Zmm, Ymm, Imm |
11747    /// +---+--------------------+
11748    /// ```
11749    #[inline]
11750    pub fn vinserti32x8_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11751    where
11752        Assembler<'a>: Vinserti32x8MaskEmitter<A, B, C, D>,
11753    {
11754        <Self as Vinserti32x8MaskEmitter<A, B, C, D>>::vinserti32x8_mask(self, op0, op1, op2, op3);
11755    }
11756    /// `VINSERTI32X8_MASKZ`.
11757    ///
11758    /// Supported operand variants:
11759    ///
11760    /// ```text
11761    /// +---+--------------------+
11762    /// | # | Operands           |
11763    /// +---+--------------------+
11764    /// | 1 | Zmm, Zmm, Mem, Imm |
11765    /// | 2 | Zmm, Zmm, Ymm, Imm |
11766    /// +---+--------------------+
11767    /// ```
11768    #[inline]
11769    pub fn vinserti32x8_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11770    where
11771        Assembler<'a>: Vinserti32x8MaskzEmitter<A, B, C, D>,
11772    {
11773        <Self as Vinserti32x8MaskzEmitter<A, B, C, D>>::vinserti32x8_maskz(
11774            self, op0, op1, op2, op3,
11775        );
11776    }
11777    /// `VINSERTI64X2`.
11778    ///
11779    /// Supported operand variants:
11780    ///
11781    /// ```text
11782    /// +---+--------------------+
11783    /// | # | Operands           |
11784    /// +---+--------------------+
11785    /// | 1 | Ymm, Ymm, Mem, Imm |
11786    /// | 2 | Ymm, Ymm, Xmm, Imm |
11787    /// | 3 | Zmm, Zmm, Mem, Imm |
11788    /// | 4 | Zmm, Zmm, Xmm, Imm |
11789    /// +---+--------------------+
11790    /// ```
11791    #[inline]
11792    pub fn vinserti64x2<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11793    where
11794        Assembler<'a>: Vinserti64x2Emitter<A, B, C, D>,
11795    {
11796        <Self as Vinserti64x2Emitter<A, B, C, D>>::vinserti64x2(self, op0, op1, op2, op3);
11797    }
11798    /// `VINSERTI64X2_MASK`.
11799    ///
11800    /// Supported operand variants:
11801    ///
11802    /// ```text
11803    /// +---+--------------------+
11804    /// | # | Operands           |
11805    /// +---+--------------------+
11806    /// | 1 | Ymm, Ymm, Mem, Imm |
11807    /// | 2 | Ymm, Ymm, Xmm, Imm |
11808    /// | 3 | Zmm, Zmm, Mem, Imm |
11809    /// | 4 | Zmm, Zmm, Xmm, Imm |
11810    /// +---+--------------------+
11811    /// ```
11812    #[inline]
11813    pub fn vinserti64x2_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11814    where
11815        Assembler<'a>: Vinserti64x2MaskEmitter<A, B, C, D>,
11816    {
11817        <Self as Vinserti64x2MaskEmitter<A, B, C, D>>::vinserti64x2_mask(self, op0, op1, op2, op3);
11818    }
11819    /// `VINSERTI64X2_MASKZ`.
11820    ///
11821    /// Supported operand variants:
11822    ///
11823    /// ```text
11824    /// +---+--------------------+
11825    /// | # | Operands           |
11826    /// +---+--------------------+
11827    /// | 1 | Ymm, Ymm, Mem, Imm |
11828    /// | 2 | Ymm, Ymm, Xmm, Imm |
11829    /// | 3 | Zmm, Zmm, Mem, Imm |
11830    /// | 4 | Zmm, Zmm, Xmm, Imm |
11831    /// +---+--------------------+
11832    /// ```
11833    #[inline]
11834    pub fn vinserti64x2_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
11835    where
11836        Assembler<'a>: Vinserti64x2MaskzEmitter<A, B, C, D>,
11837    {
11838        <Self as Vinserti64x2MaskzEmitter<A, B, C, D>>::vinserti64x2_maskz(
11839            self, op0, op1, op2, op3,
11840        );
11841    }
11842    /// `VORPD`.
11843    ///
11844    /// Supported operand variants:
11845    ///
11846    /// ```text
11847    /// +---+---------------+
11848    /// | # | Operands      |
11849    /// +---+---------------+
11850    /// | 1 | Xmm, Xmm, Mem |
11851    /// | 2 | Xmm, Xmm, Xmm |
11852    /// | 3 | Ymm, Ymm, Mem |
11853    /// | 4 | Ymm, Ymm, Ymm |
11854    /// | 5 | Zmm, Zmm, Mem |
11855    /// | 6 | Zmm, Zmm, Zmm |
11856    /// +---+---------------+
11857    /// ```
11858    #[inline]
11859    pub fn vorpd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11860    where
11861        Assembler<'a>: VorpdEmitter<A, B, C>,
11862    {
11863        <Self as VorpdEmitter<A, B, C>>::vorpd(self, op0, op1, op2);
11864    }
11865    /// `VORPD_MASK`.
11866    ///
11867    /// Supported operand variants:
11868    ///
11869    /// ```text
11870    /// +---+---------------+
11871    /// | # | Operands      |
11872    /// +---+---------------+
11873    /// | 1 | Xmm, Xmm, Mem |
11874    /// | 2 | Xmm, Xmm, Xmm |
11875    /// | 3 | Ymm, Ymm, Mem |
11876    /// | 4 | Ymm, Ymm, Ymm |
11877    /// | 5 | Zmm, Zmm, Mem |
11878    /// | 6 | Zmm, Zmm, Zmm |
11879    /// +---+---------------+
11880    /// ```
11881    #[inline]
11882    pub fn vorpd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11883    where
11884        Assembler<'a>: VorpdMaskEmitter<A, B, C>,
11885    {
11886        <Self as VorpdMaskEmitter<A, B, C>>::vorpd_mask(self, op0, op1, op2);
11887    }
11888    /// `VORPD_MASKZ`.
11889    ///
11890    /// Supported operand variants:
11891    ///
11892    /// ```text
11893    /// +---+---------------+
11894    /// | # | Operands      |
11895    /// +---+---------------+
11896    /// | 1 | Xmm, Xmm, Mem |
11897    /// | 2 | Xmm, Xmm, Xmm |
11898    /// | 3 | Ymm, Ymm, Mem |
11899    /// | 4 | Ymm, Ymm, Ymm |
11900    /// | 5 | Zmm, Zmm, Mem |
11901    /// | 6 | Zmm, Zmm, Zmm |
11902    /// +---+---------------+
11903    /// ```
11904    #[inline]
11905    pub fn vorpd_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11906    where
11907        Assembler<'a>: VorpdMaskzEmitter<A, B, C>,
11908    {
11909        <Self as VorpdMaskzEmitter<A, B, C>>::vorpd_maskz(self, op0, op1, op2);
11910    }
11911    /// `VORPS`.
11912    ///
11913    /// Supported operand variants:
11914    ///
11915    /// ```text
11916    /// +---+---------------+
11917    /// | # | Operands      |
11918    /// +---+---------------+
11919    /// | 1 | Xmm, Xmm, Mem |
11920    /// | 2 | Xmm, Xmm, Xmm |
11921    /// | 3 | Ymm, Ymm, Mem |
11922    /// | 4 | Ymm, Ymm, Ymm |
11923    /// | 5 | Zmm, Zmm, Mem |
11924    /// | 6 | Zmm, Zmm, Zmm |
11925    /// +---+---------------+
11926    /// ```
11927    #[inline]
11928    pub fn vorps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11929    where
11930        Assembler<'a>: VorpsEmitter<A, B, C>,
11931    {
11932        <Self as VorpsEmitter<A, B, C>>::vorps(self, op0, op1, op2);
11933    }
11934    /// `VORPS_MASK`.
11935    ///
11936    /// Supported operand variants:
11937    ///
11938    /// ```text
11939    /// +---+---------------+
11940    /// | # | Operands      |
11941    /// +---+---------------+
11942    /// | 1 | Xmm, Xmm, Mem |
11943    /// | 2 | Xmm, Xmm, Xmm |
11944    /// | 3 | Ymm, Ymm, Mem |
11945    /// | 4 | Ymm, Ymm, Ymm |
11946    /// | 5 | Zmm, Zmm, Mem |
11947    /// | 6 | Zmm, Zmm, Zmm |
11948    /// +---+---------------+
11949    /// ```
11950    #[inline]
11951    pub fn vorps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11952    where
11953        Assembler<'a>: VorpsMaskEmitter<A, B, C>,
11954    {
11955        <Self as VorpsMaskEmitter<A, B, C>>::vorps_mask(self, op0, op1, op2);
11956    }
11957    /// `VORPS_MASKZ`.
11958    ///
11959    /// Supported operand variants:
11960    ///
11961    /// ```text
11962    /// +---+---------------+
11963    /// | # | Operands      |
11964    /// +---+---------------+
11965    /// | 1 | Xmm, Xmm, Mem |
11966    /// | 2 | Xmm, Xmm, Xmm |
11967    /// | 3 | Ymm, Ymm, Mem |
11968    /// | 4 | Ymm, Ymm, Ymm |
11969    /// | 5 | Zmm, Zmm, Mem |
11970    /// | 6 | Zmm, Zmm, Zmm |
11971    /// +---+---------------+
11972    /// ```
11973    #[inline]
11974    pub fn vorps_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
11975    where
11976        Assembler<'a>: VorpsMaskzEmitter<A, B, C>,
11977    {
11978        <Self as VorpsMaskzEmitter<A, B, C>>::vorps_maskz(self, op0, op1, op2);
11979    }
11980    /// `VPMOVD2M`.
11981    ///
11982    /// Supported operand variants:
11983    ///
11984    /// ```text
11985    /// +---+-----------+
11986    /// | # | Operands  |
11987    /// +---+-----------+
11988    /// | 1 | KReg, Xmm |
11989    /// | 2 | KReg, Ymm |
11990    /// | 3 | KReg, Zmm |
11991    /// +---+-----------+
11992    /// ```
11993    #[inline]
11994    pub fn vpmovd2m<A, B>(&mut self, op0: A, op1: B)
11995    where
11996        Assembler<'a>: Vpmovd2mEmitter<A, B>,
11997    {
11998        <Self as Vpmovd2mEmitter<A, B>>::vpmovd2m(self, op0, op1);
11999    }
12000    /// `VPMOVM2D`.
12001    ///
12002    /// Supported operand variants:
12003    ///
12004    /// ```text
12005    /// +---+-----------+
12006    /// | # | Operands  |
12007    /// +---+-----------+
12008    /// | 1 | Xmm, KReg |
12009    /// | 2 | Ymm, KReg |
12010    /// | 3 | Zmm, KReg |
12011    /// +---+-----------+
12012    /// ```
12013    #[inline]
12014    pub fn vpmovm2d<A, B>(&mut self, op0: A, op1: B)
12015    where
12016        Assembler<'a>: Vpmovm2dEmitter<A, B>,
12017    {
12018        <Self as Vpmovm2dEmitter<A, B>>::vpmovm2d(self, op0, op1);
12019    }
12020    /// `VPMOVM2Q`.
12021    ///
12022    /// Supported operand variants:
12023    ///
12024    /// ```text
12025    /// +---+-----------+
12026    /// | # | Operands  |
12027    /// +---+-----------+
12028    /// | 1 | Xmm, KReg |
12029    /// | 2 | Ymm, KReg |
12030    /// | 3 | Zmm, KReg |
12031    /// +---+-----------+
12032    /// ```
12033    #[inline]
12034    pub fn vpmovm2q<A, B>(&mut self, op0: A, op1: B)
12035    where
12036        Assembler<'a>: Vpmovm2qEmitter<A, B>,
12037    {
12038        <Self as Vpmovm2qEmitter<A, B>>::vpmovm2q(self, op0, op1);
12039    }
12040    /// `VPMOVQ2M`.
12041    ///
12042    /// Supported operand variants:
12043    ///
12044    /// ```text
12045    /// +---+-----------+
12046    /// | # | Operands  |
12047    /// +---+-----------+
12048    /// | 1 | KReg, Xmm |
12049    /// | 2 | KReg, Ymm |
12050    /// | 3 | KReg, Zmm |
12051    /// +---+-----------+
12052    /// ```
12053    #[inline]
12054    pub fn vpmovq2m<A, B>(&mut self, op0: A, op1: B)
12055    where
12056        Assembler<'a>: Vpmovq2mEmitter<A, B>,
12057    {
12058        <Self as Vpmovq2mEmitter<A, B>>::vpmovq2m(self, op0, op1);
12059    }
12060    /// `VPMULLD`.
12061    ///
12062    /// Supported operand variants:
12063    ///
12064    /// ```text
12065    /// +---+---------------+
12066    /// | # | Operands      |
12067    /// +---+---------------+
12068    /// | 1 | Xmm, Xmm, Mem |
12069    /// | 2 | Xmm, Xmm, Xmm |
12070    /// | 3 | Ymm, Ymm, Mem |
12071    /// | 4 | Ymm, Ymm, Ymm |
12072    /// | 5 | Zmm, Zmm, Mem |
12073    /// | 6 | Zmm, Zmm, Zmm |
12074    /// +---+---------------+
12075    /// ```
12076    #[inline]
12077    pub fn vpmulld<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12078    where
12079        Assembler<'a>: VpmulldEmitter<A, B, C>,
12080    {
12081        <Self as VpmulldEmitter<A, B, C>>::vpmulld(self, op0, op1, op2);
12082    }
12083    /// `VPMULLD_MASK`.
12084    ///
12085    /// Supported operand variants:
12086    ///
12087    /// ```text
12088    /// +---+---------------+
12089    /// | # | Operands      |
12090    /// +---+---------------+
12091    /// | 1 | Xmm, Xmm, Mem |
12092    /// | 2 | Xmm, Xmm, Xmm |
12093    /// | 3 | Ymm, Ymm, Mem |
12094    /// | 4 | Ymm, Ymm, Ymm |
12095    /// | 5 | Zmm, Zmm, Mem |
12096    /// | 6 | Zmm, Zmm, Zmm |
12097    /// +---+---------------+
12098    /// ```
12099    #[inline]
12100    pub fn vpmulld_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12101    where
12102        Assembler<'a>: VpmulldMaskEmitter<A, B, C>,
12103    {
12104        <Self as VpmulldMaskEmitter<A, B, C>>::vpmulld_mask(self, op0, op1, op2);
12105    }
12106    /// `VPMULLD_MASKZ`.
12107    ///
12108    /// Supported operand variants:
12109    ///
12110    /// ```text
12111    /// +---+---------------+
12112    /// | # | Operands      |
12113    /// +---+---------------+
12114    /// | 1 | Xmm, Xmm, Mem |
12115    /// | 2 | Xmm, Xmm, Xmm |
12116    /// | 3 | Ymm, Ymm, Mem |
12117    /// | 4 | Ymm, Ymm, Ymm |
12118    /// | 5 | Zmm, Zmm, Mem |
12119    /// | 6 | Zmm, Zmm, Zmm |
12120    /// +---+---------------+
12121    /// ```
12122    #[inline]
12123    pub fn vpmulld_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12124    where
12125        Assembler<'a>: VpmulldMaskzEmitter<A, B, C>,
12126    {
12127        <Self as VpmulldMaskzEmitter<A, B, C>>::vpmulld_maskz(self, op0, op1, op2);
12128    }
12129    /// `VPMULLQ`.
12130    ///
12131    /// Supported operand variants:
12132    ///
12133    /// ```text
12134    /// +---+---------------+
12135    /// | # | Operands      |
12136    /// +---+---------------+
12137    /// | 1 | Xmm, Xmm, Mem |
12138    /// | 2 | Xmm, Xmm, Xmm |
12139    /// | 3 | Ymm, Ymm, Mem |
12140    /// | 4 | Ymm, Ymm, Ymm |
12141    /// | 5 | Zmm, Zmm, Mem |
12142    /// | 6 | Zmm, Zmm, Zmm |
12143    /// +---+---------------+
12144    /// ```
12145    #[inline]
12146    pub fn vpmullq<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12147    where
12148        Assembler<'a>: VpmullqEmitter<A, B, C>,
12149    {
12150        <Self as VpmullqEmitter<A, B, C>>::vpmullq(self, op0, op1, op2);
12151    }
12152    /// `VPMULLQ_MASK`.
12153    ///
12154    /// Supported operand variants:
12155    ///
12156    /// ```text
12157    /// +---+---------------+
12158    /// | # | Operands      |
12159    /// +---+---------------+
12160    /// | 1 | Xmm, Xmm, Mem |
12161    /// | 2 | Xmm, Xmm, Xmm |
12162    /// | 3 | Ymm, Ymm, Mem |
12163    /// | 4 | Ymm, Ymm, Ymm |
12164    /// | 5 | Zmm, Zmm, Mem |
12165    /// | 6 | Zmm, Zmm, Zmm |
12166    /// +---+---------------+
12167    /// ```
12168    #[inline]
12169    pub fn vpmullq_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12170    where
12171        Assembler<'a>: VpmullqMaskEmitter<A, B, C>,
12172    {
12173        <Self as VpmullqMaskEmitter<A, B, C>>::vpmullq_mask(self, op0, op1, op2);
12174    }
12175    /// `VPMULLQ_MASKZ`.
12176    ///
12177    /// Supported operand variants:
12178    ///
12179    /// ```text
12180    /// +---+---------------+
12181    /// | # | Operands      |
12182    /// +---+---------------+
12183    /// | 1 | Xmm, Xmm, Mem |
12184    /// | 2 | Xmm, Xmm, Xmm |
12185    /// | 3 | Ymm, Ymm, Mem |
12186    /// | 4 | Ymm, Ymm, Ymm |
12187    /// | 5 | Zmm, Zmm, Mem |
12188    /// | 6 | Zmm, Zmm, Zmm |
12189    /// +---+---------------+
12190    /// ```
12191    #[inline]
12192    pub fn vpmullq_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12193    where
12194        Assembler<'a>: VpmullqMaskzEmitter<A, B, C>,
12195    {
12196        <Self as VpmullqMaskzEmitter<A, B, C>>::vpmullq_maskz(self, op0, op1, op2);
12197    }
12198    /// `VRANGEPD`.
12199    ///
12200    /// Supported operand variants:
12201    ///
12202    /// ```text
12203    /// +---+--------------------+
12204    /// | # | Operands           |
12205    /// +---+--------------------+
12206    /// | 1 | Xmm, Xmm, Mem, Imm |
12207    /// | 2 | Xmm, Xmm, Xmm, Imm |
12208    /// | 3 | Ymm, Ymm, Mem, Imm |
12209    /// | 4 | Ymm, Ymm, Ymm, Imm |
12210    /// | 5 | Zmm, Zmm, Mem, Imm |
12211    /// | 6 | Zmm, Zmm, Zmm, Imm |
12212    /// +---+--------------------+
12213    /// ```
12214    #[inline]
12215    pub fn vrangepd<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12216    where
12217        Assembler<'a>: VrangepdEmitter<A, B, C, D>,
12218    {
12219        <Self as VrangepdEmitter<A, B, C, D>>::vrangepd(self, op0, op1, op2, op3);
12220    }
12221    /// `VRANGEPD_MASK`.
12222    ///
12223    /// Supported operand variants:
12224    ///
12225    /// ```text
12226    /// +---+--------------------+
12227    /// | # | Operands           |
12228    /// +---+--------------------+
12229    /// | 1 | Xmm, Xmm, Mem, Imm |
12230    /// | 2 | Xmm, Xmm, Xmm, Imm |
12231    /// | 3 | Ymm, Ymm, Mem, Imm |
12232    /// | 4 | Ymm, Ymm, Ymm, Imm |
12233    /// | 5 | Zmm, Zmm, Mem, Imm |
12234    /// | 6 | Zmm, Zmm, Zmm, Imm |
12235    /// +---+--------------------+
12236    /// ```
12237    #[inline]
12238    pub fn vrangepd_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12239    where
12240        Assembler<'a>: VrangepdMaskEmitter<A, B, C, D>,
12241    {
12242        <Self as VrangepdMaskEmitter<A, B, C, D>>::vrangepd_mask(self, op0, op1, op2, op3);
12243    }
12244    /// `VRANGEPD_MASK_SAE`.
12245    ///
12246    /// Supported operand variants:
12247    ///
12248    /// ```text
12249    /// +---+--------------------+
12250    /// | # | Operands           |
12251    /// +---+--------------------+
12252    /// | 1 | Zmm, Zmm, Zmm, Imm |
12253    /// +---+--------------------+
12254    /// ```
12255    #[inline]
12256    pub fn vrangepd_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12257    where
12258        Assembler<'a>: VrangepdMaskSaeEmitter<A, B, C, D>,
12259    {
12260        <Self as VrangepdMaskSaeEmitter<A, B, C, D>>::vrangepd_mask_sae(self, op0, op1, op2, op3);
12261    }
12262    /// `VRANGEPD_MASKZ`.
12263    ///
12264    /// Supported operand variants:
12265    ///
12266    /// ```text
12267    /// +---+--------------------+
12268    /// | # | Operands           |
12269    /// +---+--------------------+
12270    /// | 1 | Xmm, Xmm, Mem, Imm |
12271    /// | 2 | Xmm, Xmm, Xmm, Imm |
12272    /// | 3 | Ymm, Ymm, Mem, Imm |
12273    /// | 4 | Ymm, Ymm, Ymm, Imm |
12274    /// | 5 | Zmm, Zmm, Mem, Imm |
12275    /// | 6 | Zmm, Zmm, Zmm, Imm |
12276    /// +---+--------------------+
12277    /// ```
12278    #[inline]
12279    pub fn vrangepd_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12280    where
12281        Assembler<'a>: VrangepdMaskzEmitter<A, B, C, D>,
12282    {
12283        <Self as VrangepdMaskzEmitter<A, B, C, D>>::vrangepd_maskz(self, op0, op1, op2, op3);
12284    }
12285    /// `VRANGEPD_MASKZ_SAE`.
12286    ///
12287    /// Supported operand variants:
12288    ///
12289    /// ```text
12290    /// +---+--------------------+
12291    /// | # | Operands           |
12292    /// +---+--------------------+
12293    /// | 1 | Zmm, Zmm, Zmm, Imm |
12294    /// +---+--------------------+
12295    /// ```
12296    #[inline]
12297    pub fn vrangepd_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12298    where
12299        Assembler<'a>: VrangepdMaskzSaeEmitter<A, B, C, D>,
12300    {
12301        <Self as VrangepdMaskzSaeEmitter<A, B, C, D>>::vrangepd_maskz_sae(self, op0, op1, op2, op3);
12302    }
12303    /// `VRANGEPD_SAE`.
12304    ///
12305    /// Supported operand variants:
12306    ///
12307    /// ```text
12308    /// +---+--------------------+
12309    /// | # | Operands           |
12310    /// +---+--------------------+
12311    /// | 1 | Zmm, Zmm, Zmm, Imm |
12312    /// +---+--------------------+
12313    /// ```
12314    #[inline]
12315    pub fn vrangepd_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12316    where
12317        Assembler<'a>: VrangepdSaeEmitter<A, B, C, D>,
12318    {
12319        <Self as VrangepdSaeEmitter<A, B, C, D>>::vrangepd_sae(self, op0, op1, op2, op3);
12320    }
12321    /// `VRANGEPS`.
12322    ///
12323    /// Supported operand variants:
12324    ///
12325    /// ```text
12326    /// +---+--------------------+
12327    /// | # | Operands           |
12328    /// +---+--------------------+
12329    /// | 1 | Xmm, Xmm, Mem, Imm |
12330    /// | 2 | Xmm, Xmm, Xmm, Imm |
12331    /// | 3 | Ymm, Ymm, Mem, Imm |
12332    /// | 4 | Ymm, Ymm, Ymm, Imm |
12333    /// | 5 | Zmm, Zmm, Mem, Imm |
12334    /// | 6 | Zmm, Zmm, Zmm, Imm |
12335    /// +---+--------------------+
12336    /// ```
12337    #[inline]
12338    pub fn vrangeps<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12339    where
12340        Assembler<'a>: VrangepsEmitter<A, B, C, D>,
12341    {
12342        <Self as VrangepsEmitter<A, B, C, D>>::vrangeps(self, op0, op1, op2, op3);
12343    }
12344    /// `VRANGEPS_MASK`.
12345    ///
12346    /// Supported operand variants:
12347    ///
12348    /// ```text
12349    /// +---+--------------------+
12350    /// | # | Operands           |
12351    /// +---+--------------------+
12352    /// | 1 | Xmm, Xmm, Mem, Imm |
12353    /// | 2 | Xmm, Xmm, Xmm, Imm |
12354    /// | 3 | Ymm, Ymm, Mem, Imm |
12355    /// | 4 | Ymm, Ymm, Ymm, Imm |
12356    /// | 5 | Zmm, Zmm, Mem, Imm |
12357    /// | 6 | Zmm, Zmm, Zmm, Imm |
12358    /// +---+--------------------+
12359    /// ```
12360    #[inline]
12361    pub fn vrangeps_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12362    where
12363        Assembler<'a>: VrangepsMaskEmitter<A, B, C, D>,
12364    {
12365        <Self as VrangepsMaskEmitter<A, B, C, D>>::vrangeps_mask(self, op0, op1, op2, op3);
12366    }
12367    /// `VRANGEPS_MASK_SAE`.
12368    ///
12369    /// Supported operand variants:
12370    ///
12371    /// ```text
12372    /// +---+--------------------+
12373    /// | # | Operands           |
12374    /// +---+--------------------+
12375    /// | 1 | Zmm, Zmm, Zmm, Imm |
12376    /// +---+--------------------+
12377    /// ```
12378    #[inline]
12379    pub fn vrangeps_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12380    where
12381        Assembler<'a>: VrangepsMaskSaeEmitter<A, B, C, D>,
12382    {
12383        <Self as VrangepsMaskSaeEmitter<A, B, C, D>>::vrangeps_mask_sae(self, op0, op1, op2, op3);
12384    }
12385    /// `VRANGEPS_MASKZ`.
12386    ///
12387    /// Supported operand variants:
12388    ///
12389    /// ```text
12390    /// +---+--------------------+
12391    /// | # | Operands           |
12392    /// +---+--------------------+
12393    /// | 1 | Xmm, Xmm, Mem, Imm |
12394    /// | 2 | Xmm, Xmm, Xmm, Imm |
12395    /// | 3 | Ymm, Ymm, Mem, Imm |
12396    /// | 4 | Ymm, Ymm, Ymm, Imm |
12397    /// | 5 | Zmm, Zmm, Mem, Imm |
12398    /// | 6 | Zmm, Zmm, Zmm, Imm |
12399    /// +---+--------------------+
12400    /// ```
12401    #[inline]
12402    pub fn vrangeps_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12403    where
12404        Assembler<'a>: VrangepsMaskzEmitter<A, B, C, D>,
12405    {
12406        <Self as VrangepsMaskzEmitter<A, B, C, D>>::vrangeps_maskz(self, op0, op1, op2, op3);
12407    }
12408    /// `VRANGEPS_MASKZ_SAE`.
12409    ///
12410    /// Supported operand variants:
12411    ///
12412    /// ```text
12413    /// +---+--------------------+
12414    /// | # | Operands           |
12415    /// +---+--------------------+
12416    /// | 1 | Zmm, Zmm, Zmm, Imm |
12417    /// +---+--------------------+
12418    /// ```
12419    #[inline]
12420    pub fn vrangeps_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12421    where
12422        Assembler<'a>: VrangepsMaskzSaeEmitter<A, B, C, D>,
12423    {
12424        <Self as VrangepsMaskzSaeEmitter<A, B, C, D>>::vrangeps_maskz_sae(self, op0, op1, op2, op3);
12425    }
12426    /// `VRANGEPS_SAE`.
12427    ///
12428    /// Supported operand variants:
12429    ///
12430    /// ```text
12431    /// +---+--------------------+
12432    /// | # | Operands           |
12433    /// +---+--------------------+
12434    /// | 1 | Zmm, Zmm, Zmm, Imm |
12435    /// +---+--------------------+
12436    /// ```
12437    #[inline]
12438    pub fn vrangeps_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12439    where
12440        Assembler<'a>: VrangepsSaeEmitter<A, B, C, D>,
12441    {
12442        <Self as VrangepsSaeEmitter<A, B, C, D>>::vrangeps_sae(self, op0, op1, op2, op3);
12443    }
12444    /// `VRANGESD`.
12445    ///
12446    /// Supported operand variants:
12447    ///
12448    /// ```text
12449    /// +---+--------------------+
12450    /// | # | Operands           |
12451    /// +---+--------------------+
12452    /// | 1 | Xmm, Xmm, Mem, Imm |
12453    /// | 2 | Xmm, Xmm, Xmm, Imm |
12454    /// +---+--------------------+
12455    /// ```
12456    #[inline]
12457    pub fn vrangesd<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12458    where
12459        Assembler<'a>: VrangesdEmitter<A, B, C, D>,
12460    {
12461        <Self as VrangesdEmitter<A, B, C, D>>::vrangesd(self, op0, op1, op2, op3);
12462    }
12463    /// `VRANGESD_MASK`.
12464    ///
12465    /// Supported operand variants:
12466    ///
12467    /// ```text
12468    /// +---+--------------------+
12469    /// | # | Operands           |
12470    /// +---+--------------------+
12471    /// | 1 | Xmm, Xmm, Mem, Imm |
12472    /// | 2 | Xmm, Xmm, Xmm, Imm |
12473    /// +---+--------------------+
12474    /// ```
12475    #[inline]
12476    pub fn vrangesd_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12477    where
12478        Assembler<'a>: VrangesdMaskEmitter<A, B, C, D>,
12479    {
12480        <Self as VrangesdMaskEmitter<A, B, C, D>>::vrangesd_mask(self, op0, op1, op2, op3);
12481    }
12482    /// `VRANGESD_MASK_SAE`.
12483    ///
12484    /// Supported operand variants:
12485    ///
12486    /// ```text
12487    /// +---+--------------------+
12488    /// | # | Operands           |
12489    /// +---+--------------------+
12490    /// | 1 | Xmm, Xmm, Xmm, Imm |
12491    /// +---+--------------------+
12492    /// ```
12493    #[inline]
12494    pub fn vrangesd_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12495    where
12496        Assembler<'a>: VrangesdMaskSaeEmitter<A, B, C, D>,
12497    {
12498        <Self as VrangesdMaskSaeEmitter<A, B, C, D>>::vrangesd_mask_sae(self, op0, op1, op2, op3);
12499    }
12500    /// `VRANGESD_MASKZ`.
12501    ///
12502    /// Supported operand variants:
12503    ///
12504    /// ```text
12505    /// +---+--------------------+
12506    /// | # | Operands           |
12507    /// +---+--------------------+
12508    /// | 1 | Xmm, Xmm, Mem, Imm |
12509    /// | 2 | Xmm, Xmm, Xmm, Imm |
12510    /// +---+--------------------+
12511    /// ```
12512    #[inline]
12513    pub fn vrangesd_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12514    where
12515        Assembler<'a>: VrangesdMaskzEmitter<A, B, C, D>,
12516    {
12517        <Self as VrangesdMaskzEmitter<A, B, C, D>>::vrangesd_maskz(self, op0, op1, op2, op3);
12518    }
12519    /// `VRANGESD_MASKZ_SAE`.
12520    ///
12521    /// Supported operand variants:
12522    ///
12523    /// ```text
12524    /// +---+--------------------+
12525    /// | # | Operands           |
12526    /// +---+--------------------+
12527    /// | 1 | Xmm, Xmm, Xmm, Imm |
12528    /// +---+--------------------+
12529    /// ```
12530    #[inline]
12531    pub fn vrangesd_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12532    where
12533        Assembler<'a>: VrangesdMaskzSaeEmitter<A, B, C, D>,
12534    {
12535        <Self as VrangesdMaskzSaeEmitter<A, B, C, D>>::vrangesd_maskz_sae(self, op0, op1, op2, op3);
12536    }
12537    /// `VRANGESD_SAE`.
12538    ///
12539    /// Supported operand variants:
12540    ///
12541    /// ```text
12542    /// +---+--------------------+
12543    /// | # | Operands           |
12544    /// +---+--------------------+
12545    /// | 1 | Xmm, Xmm, Xmm, Imm |
12546    /// +---+--------------------+
12547    /// ```
12548    #[inline]
12549    pub fn vrangesd_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12550    where
12551        Assembler<'a>: VrangesdSaeEmitter<A, B, C, D>,
12552    {
12553        <Self as VrangesdSaeEmitter<A, B, C, D>>::vrangesd_sae(self, op0, op1, op2, op3);
12554    }
12555    /// `VRANGESS`.
12556    ///
12557    /// Supported operand variants:
12558    ///
12559    /// ```text
12560    /// +---+--------------------+
12561    /// | # | Operands           |
12562    /// +---+--------------------+
12563    /// | 1 | Xmm, Xmm, Mem, Imm |
12564    /// | 2 | Xmm, Xmm, Xmm, Imm |
12565    /// +---+--------------------+
12566    /// ```
12567    #[inline]
12568    pub fn vrangess<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12569    where
12570        Assembler<'a>: VrangessEmitter<A, B, C, D>,
12571    {
12572        <Self as VrangessEmitter<A, B, C, D>>::vrangess(self, op0, op1, op2, op3);
12573    }
12574    /// `VRANGESS_MASK`.
12575    ///
12576    /// Supported operand variants:
12577    ///
12578    /// ```text
12579    /// +---+--------------------+
12580    /// | # | Operands           |
12581    /// +---+--------------------+
12582    /// | 1 | Xmm, Xmm, Mem, Imm |
12583    /// | 2 | Xmm, Xmm, Xmm, Imm |
12584    /// +---+--------------------+
12585    /// ```
12586    #[inline]
12587    pub fn vrangess_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12588    where
12589        Assembler<'a>: VrangessMaskEmitter<A, B, C, D>,
12590    {
12591        <Self as VrangessMaskEmitter<A, B, C, D>>::vrangess_mask(self, op0, op1, op2, op3);
12592    }
12593    /// `VRANGESS_MASK_SAE`.
12594    ///
12595    /// Supported operand variants:
12596    ///
12597    /// ```text
12598    /// +---+--------------------+
12599    /// | # | Operands           |
12600    /// +---+--------------------+
12601    /// | 1 | Xmm, Xmm, Xmm, Imm |
12602    /// +---+--------------------+
12603    /// ```
12604    #[inline]
12605    pub fn vrangess_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12606    where
12607        Assembler<'a>: VrangessMaskSaeEmitter<A, B, C, D>,
12608    {
12609        <Self as VrangessMaskSaeEmitter<A, B, C, D>>::vrangess_mask_sae(self, op0, op1, op2, op3);
12610    }
12611    /// `VRANGESS_MASKZ`.
12612    ///
12613    /// Supported operand variants:
12614    ///
12615    /// ```text
12616    /// +---+--------------------+
12617    /// | # | Operands           |
12618    /// +---+--------------------+
12619    /// | 1 | Xmm, Xmm, Mem, Imm |
12620    /// | 2 | Xmm, Xmm, Xmm, Imm |
12621    /// +---+--------------------+
12622    /// ```
12623    #[inline]
12624    pub fn vrangess_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12625    where
12626        Assembler<'a>: VrangessMaskzEmitter<A, B, C, D>,
12627    {
12628        <Self as VrangessMaskzEmitter<A, B, C, D>>::vrangess_maskz(self, op0, op1, op2, op3);
12629    }
12630    /// `VRANGESS_MASKZ_SAE`.
12631    ///
12632    /// Supported operand variants:
12633    ///
12634    /// ```text
12635    /// +---+--------------------+
12636    /// | # | Operands           |
12637    /// +---+--------------------+
12638    /// | 1 | Xmm, Xmm, Xmm, Imm |
12639    /// +---+--------------------+
12640    /// ```
12641    #[inline]
12642    pub fn vrangess_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12643    where
12644        Assembler<'a>: VrangessMaskzSaeEmitter<A, B, C, D>,
12645    {
12646        <Self as VrangessMaskzSaeEmitter<A, B, C, D>>::vrangess_maskz_sae(self, op0, op1, op2, op3);
12647    }
12648    /// `VRANGESS_SAE`.
12649    ///
12650    /// Supported operand variants:
12651    ///
12652    /// ```text
12653    /// +---+--------------------+
12654    /// | # | Operands           |
12655    /// +---+--------------------+
12656    /// | 1 | Xmm, Xmm, Xmm, Imm |
12657    /// +---+--------------------+
12658    /// ```
12659    #[inline]
12660    pub fn vrangess_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12661    where
12662        Assembler<'a>: VrangessSaeEmitter<A, B, C, D>,
12663    {
12664        <Self as VrangessSaeEmitter<A, B, C, D>>::vrangess_sae(self, op0, op1, op2, op3);
12665    }
12666    /// `VREDUCEPD`.
12667    ///
12668    /// Supported operand variants:
12669    ///
12670    /// ```text
12671    /// +---+---------------+
12672    /// | # | Operands      |
12673    /// +---+---------------+
12674    /// | 1 | Xmm, Mem, Imm |
12675    /// | 2 | Xmm, Xmm, Imm |
12676    /// | 3 | Ymm, Mem, Imm |
12677    /// | 4 | Ymm, Ymm, Imm |
12678    /// | 5 | Zmm, Mem, Imm |
12679    /// | 6 | Zmm, Zmm, Imm |
12680    /// +---+---------------+
12681    /// ```
12682    #[inline]
12683    pub fn vreducepd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12684    where
12685        Assembler<'a>: VreducepdEmitter<A, B, C>,
12686    {
12687        <Self as VreducepdEmitter<A, B, C>>::vreducepd(self, op0, op1, op2);
12688    }
12689    /// `VREDUCEPD_MASK`.
12690    ///
12691    /// Supported operand variants:
12692    ///
12693    /// ```text
12694    /// +---+---------------+
12695    /// | # | Operands      |
12696    /// +---+---------------+
12697    /// | 1 | Xmm, Mem, Imm |
12698    /// | 2 | Xmm, Xmm, Imm |
12699    /// | 3 | Ymm, Mem, Imm |
12700    /// | 4 | Ymm, Ymm, Imm |
12701    /// | 5 | Zmm, Mem, Imm |
12702    /// | 6 | Zmm, Zmm, Imm |
12703    /// +---+---------------+
12704    /// ```
12705    #[inline]
12706    pub fn vreducepd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12707    where
12708        Assembler<'a>: VreducepdMaskEmitter<A, B, C>,
12709    {
12710        <Self as VreducepdMaskEmitter<A, B, C>>::vreducepd_mask(self, op0, op1, op2);
12711    }
12712    /// `VREDUCEPD_MASK_SAE`.
12713    ///
12714    /// Supported operand variants:
12715    ///
12716    /// ```text
12717    /// +---+---------------+
12718    /// | # | Operands      |
12719    /// +---+---------------+
12720    /// | 1 | Zmm, Zmm, Imm |
12721    /// +---+---------------+
12722    /// ```
12723    #[inline]
12724    pub fn vreducepd_mask_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12725    where
12726        Assembler<'a>: VreducepdMaskSaeEmitter<A, B, C>,
12727    {
12728        <Self as VreducepdMaskSaeEmitter<A, B, C>>::vreducepd_mask_sae(self, op0, op1, op2);
12729    }
12730    /// `VREDUCEPD_MASKZ`.
12731    ///
12732    /// Supported operand variants:
12733    ///
12734    /// ```text
12735    /// +---+---------------+
12736    /// | # | Operands      |
12737    /// +---+---------------+
12738    /// | 1 | Xmm, Mem, Imm |
12739    /// | 2 | Xmm, Xmm, Imm |
12740    /// | 3 | Ymm, Mem, Imm |
12741    /// | 4 | Ymm, Ymm, Imm |
12742    /// | 5 | Zmm, Mem, Imm |
12743    /// | 6 | Zmm, Zmm, Imm |
12744    /// +---+---------------+
12745    /// ```
12746    #[inline]
12747    pub fn vreducepd_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12748    where
12749        Assembler<'a>: VreducepdMaskzEmitter<A, B, C>,
12750    {
12751        <Self as VreducepdMaskzEmitter<A, B, C>>::vreducepd_maskz(self, op0, op1, op2);
12752    }
12753    /// `VREDUCEPD_MASKZ_SAE`.
12754    ///
12755    /// Supported operand variants:
12756    ///
12757    /// ```text
12758    /// +---+---------------+
12759    /// | # | Operands      |
12760    /// +---+---------------+
12761    /// | 1 | Zmm, Zmm, Imm |
12762    /// +---+---------------+
12763    /// ```
12764    #[inline]
12765    pub fn vreducepd_maskz_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12766    where
12767        Assembler<'a>: VreducepdMaskzSaeEmitter<A, B, C>,
12768    {
12769        <Self as VreducepdMaskzSaeEmitter<A, B, C>>::vreducepd_maskz_sae(self, op0, op1, op2);
12770    }
12771    /// `VREDUCEPD_SAE`.
12772    ///
12773    /// Supported operand variants:
12774    ///
12775    /// ```text
12776    /// +---+---------------+
12777    /// | # | Operands      |
12778    /// +---+---------------+
12779    /// | 1 | Zmm, Zmm, Imm |
12780    /// +---+---------------+
12781    /// ```
12782    #[inline]
12783    pub fn vreducepd_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12784    where
12785        Assembler<'a>: VreducepdSaeEmitter<A, B, C>,
12786    {
12787        <Self as VreducepdSaeEmitter<A, B, C>>::vreducepd_sae(self, op0, op1, op2);
12788    }
12789    /// `VREDUCEPS`.
12790    ///
12791    /// Supported operand variants:
12792    ///
12793    /// ```text
12794    /// +---+---------------+
12795    /// | # | Operands      |
12796    /// +---+---------------+
12797    /// | 1 | Xmm, Mem, Imm |
12798    /// | 2 | Xmm, Xmm, Imm |
12799    /// | 3 | Ymm, Mem, Imm |
12800    /// | 4 | Ymm, Ymm, Imm |
12801    /// | 5 | Zmm, Mem, Imm |
12802    /// | 6 | Zmm, Zmm, Imm |
12803    /// +---+---------------+
12804    /// ```
12805    #[inline]
12806    pub fn vreduceps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12807    where
12808        Assembler<'a>: VreducepsEmitter<A, B, C>,
12809    {
12810        <Self as VreducepsEmitter<A, B, C>>::vreduceps(self, op0, op1, op2);
12811    }
12812    /// `VREDUCEPS_MASK`.
12813    ///
12814    /// Supported operand variants:
12815    ///
12816    /// ```text
12817    /// +---+---------------+
12818    /// | # | Operands      |
12819    /// +---+---------------+
12820    /// | 1 | Xmm, Mem, Imm |
12821    /// | 2 | Xmm, Xmm, Imm |
12822    /// | 3 | Ymm, Mem, Imm |
12823    /// | 4 | Ymm, Ymm, Imm |
12824    /// | 5 | Zmm, Mem, Imm |
12825    /// | 6 | Zmm, Zmm, Imm |
12826    /// +---+---------------+
12827    /// ```
12828    #[inline]
12829    pub fn vreduceps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12830    where
12831        Assembler<'a>: VreducepsMaskEmitter<A, B, C>,
12832    {
12833        <Self as VreducepsMaskEmitter<A, B, C>>::vreduceps_mask(self, op0, op1, op2);
12834    }
12835    /// `VREDUCEPS_MASK_SAE`.
12836    ///
12837    /// Supported operand variants:
12838    ///
12839    /// ```text
12840    /// +---+---------------+
12841    /// | # | Operands      |
12842    /// +---+---------------+
12843    /// | 1 | Zmm, Zmm, Imm |
12844    /// +---+---------------+
12845    /// ```
12846    #[inline]
12847    pub fn vreduceps_mask_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12848    where
12849        Assembler<'a>: VreducepsMaskSaeEmitter<A, B, C>,
12850    {
12851        <Self as VreducepsMaskSaeEmitter<A, B, C>>::vreduceps_mask_sae(self, op0, op1, op2);
12852    }
12853    /// `VREDUCEPS_MASKZ`.
12854    ///
12855    /// Supported operand variants:
12856    ///
12857    /// ```text
12858    /// +---+---------------+
12859    /// | # | Operands      |
12860    /// +---+---------------+
12861    /// | 1 | Xmm, Mem, Imm |
12862    /// | 2 | Xmm, Xmm, Imm |
12863    /// | 3 | Ymm, Mem, Imm |
12864    /// | 4 | Ymm, Ymm, Imm |
12865    /// | 5 | Zmm, Mem, Imm |
12866    /// | 6 | Zmm, Zmm, Imm |
12867    /// +---+---------------+
12868    /// ```
12869    #[inline]
12870    pub fn vreduceps_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12871    where
12872        Assembler<'a>: VreducepsMaskzEmitter<A, B, C>,
12873    {
12874        <Self as VreducepsMaskzEmitter<A, B, C>>::vreduceps_maskz(self, op0, op1, op2);
12875    }
12876    /// `VREDUCEPS_MASKZ_SAE`.
12877    ///
12878    /// Supported operand variants:
12879    ///
12880    /// ```text
12881    /// +---+---------------+
12882    /// | # | Operands      |
12883    /// +---+---------------+
12884    /// | 1 | Zmm, Zmm, Imm |
12885    /// +---+---------------+
12886    /// ```
12887    #[inline]
12888    pub fn vreduceps_maskz_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12889    where
12890        Assembler<'a>: VreducepsMaskzSaeEmitter<A, B, C>,
12891    {
12892        <Self as VreducepsMaskzSaeEmitter<A, B, C>>::vreduceps_maskz_sae(self, op0, op1, op2);
12893    }
12894    /// `VREDUCEPS_SAE`.
12895    ///
12896    /// Supported operand variants:
12897    ///
12898    /// ```text
12899    /// +---+---------------+
12900    /// | # | Operands      |
12901    /// +---+---------------+
12902    /// | 1 | Zmm, Zmm, Imm |
12903    /// +---+---------------+
12904    /// ```
12905    #[inline]
12906    pub fn vreduceps_sae<A, B, C>(&mut self, op0: A, op1: B, op2: C)
12907    where
12908        Assembler<'a>: VreducepsSaeEmitter<A, B, C>,
12909    {
12910        <Self as VreducepsSaeEmitter<A, B, C>>::vreduceps_sae(self, op0, op1, op2);
12911    }
12912    /// `VREDUCESD`.
12913    ///
12914    /// Supported operand variants:
12915    ///
12916    /// ```text
12917    /// +---+--------------------+
12918    /// | # | Operands           |
12919    /// +---+--------------------+
12920    /// | 1 | Xmm, Xmm, Mem, Imm |
12921    /// | 2 | Xmm, Xmm, Xmm, Imm |
12922    /// +---+--------------------+
12923    /// ```
12924    #[inline]
12925    pub fn vreducesd<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12926    where
12927        Assembler<'a>: VreducesdEmitter<A, B, C, D>,
12928    {
12929        <Self as VreducesdEmitter<A, B, C, D>>::vreducesd(self, op0, op1, op2, op3);
12930    }
12931    /// `VREDUCESD_MASK`.
12932    ///
12933    /// Supported operand variants:
12934    ///
12935    /// ```text
12936    /// +---+--------------------+
12937    /// | # | Operands           |
12938    /// +---+--------------------+
12939    /// | 1 | Xmm, Xmm, Mem, Imm |
12940    /// | 2 | Xmm, Xmm, Xmm, Imm |
12941    /// +---+--------------------+
12942    /// ```
12943    #[inline]
12944    pub fn vreducesd_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12945    where
12946        Assembler<'a>: VreducesdMaskEmitter<A, B, C, D>,
12947    {
12948        <Self as VreducesdMaskEmitter<A, B, C, D>>::vreducesd_mask(self, op0, op1, op2, op3);
12949    }
12950    /// `VREDUCESD_MASK_SAE`.
12951    ///
12952    /// Supported operand variants:
12953    ///
12954    /// ```text
12955    /// +---+--------------------+
12956    /// | # | Operands           |
12957    /// +---+--------------------+
12958    /// | 1 | Xmm, Xmm, Xmm, Imm |
12959    /// +---+--------------------+
12960    /// ```
12961    #[inline]
12962    pub fn vreducesd_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12963    where
12964        Assembler<'a>: VreducesdMaskSaeEmitter<A, B, C, D>,
12965    {
12966        <Self as VreducesdMaskSaeEmitter<A, B, C, D>>::vreducesd_mask_sae(self, op0, op1, op2, op3);
12967    }
12968    /// `VREDUCESD_MASKZ`.
12969    ///
12970    /// Supported operand variants:
12971    ///
12972    /// ```text
12973    /// +---+--------------------+
12974    /// | # | Operands           |
12975    /// +---+--------------------+
12976    /// | 1 | Xmm, Xmm, Mem, Imm |
12977    /// | 2 | Xmm, Xmm, Xmm, Imm |
12978    /// +---+--------------------+
12979    /// ```
12980    #[inline]
12981    pub fn vreducesd_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
12982    where
12983        Assembler<'a>: VreducesdMaskzEmitter<A, B, C, D>,
12984    {
12985        <Self as VreducesdMaskzEmitter<A, B, C, D>>::vreducesd_maskz(self, op0, op1, op2, op3);
12986    }
12987    /// `VREDUCESD_MASKZ_SAE`.
12988    ///
12989    /// Supported operand variants:
12990    ///
12991    /// ```text
12992    /// +---+--------------------+
12993    /// | # | Operands           |
12994    /// +---+--------------------+
12995    /// | 1 | Xmm, Xmm, Xmm, Imm |
12996    /// +---+--------------------+
12997    /// ```
12998    #[inline]
12999    pub fn vreducesd_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13000    where
13001        Assembler<'a>: VreducesdMaskzSaeEmitter<A, B, C, D>,
13002    {
13003        <Self as VreducesdMaskzSaeEmitter<A, B, C, D>>::vreducesd_maskz_sae(
13004            self, op0, op1, op2, op3,
13005        );
13006    }
13007    /// `VREDUCESD_SAE`.
13008    ///
13009    /// Supported operand variants:
13010    ///
13011    /// ```text
13012    /// +---+--------------------+
13013    /// | # | Operands           |
13014    /// +---+--------------------+
13015    /// | 1 | Xmm, Xmm, Xmm, Imm |
13016    /// +---+--------------------+
13017    /// ```
13018    #[inline]
13019    pub fn vreducesd_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13020    where
13021        Assembler<'a>: VreducesdSaeEmitter<A, B, C, D>,
13022    {
13023        <Self as VreducesdSaeEmitter<A, B, C, D>>::vreducesd_sae(self, op0, op1, op2, op3);
13024    }
13025    /// `VREDUCESS`.
13026    ///
13027    /// Supported operand variants:
13028    ///
13029    /// ```text
13030    /// +---+--------------------+
13031    /// | # | Operands           |
13032    /// +---+--------------------+
13033    /// | 1 | Xmm, Xmm, Mem, Imm |
13034    /// | 2 | Xmm, Xmm, Xmm, Imm |
13035    /// +---+--------------------+
13036    /// ```
13037    #[inline]
13038    pub fn vreducess<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13039    where
13040        Assembler<'a>: VreducessEmitter<A, B, C, D>,
13041    {
13042        <Self as VreducessEmitter<A, B, C, D>>::vreducess(self, op0, op1, op2, op3);
13043    }
13044    /// `VREDUCESS_MASK`.
13045    ///
13046    /// Supported operand variants:
13047    ///
13048    /// ```text
13049    /// +---+--------------------+
13050    /// | # | Operands           |
13051    /// +---+--------------------+
13052    /// | 1 | Xmm, Xmm, Mem, Imm |
13053    /// | 2 | Xmm, Xmm, Xmm, Imm |
13054    /// +---+--------------------+
13055    /// ```
13056    #[inline]
13057    pub fn vreducess_mask<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13058    where
13059        Assembler<'a>: VreducessMaskEmitter<A, B, C, D>,
13060    {
13061        <Self as VreducessMaskEmitter<A, B, C, D>>::vreducess_mask(self, op0, op1, op2, op3);
13062    }
13063    /// `VREDUCESS_MASK_SAE`.
13064    ///
13065    /// Supported operand variants:
13066    ///
13067    /// ```text
13068    /// +---+--------------------+
13069    /// | # | Operands           |
13070    /// +---+--------------------+
13071    /// | 1 | Xmm, Xmm, Xmm, Imm |
13072    /// +---+--------------------+
13073    /// ```
13074    #[inline]
13075    pub fn vreducess_mask_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13076    where
13077        Assembler<'a>: VreducessMaskSaeEmitter<A, B, C, D>,
13078    {
13079        <Self as VreducessMaskSaeEmitter<A, B, C, D>>::vreducess_mask_sae(self, op0, op1, op2, op3);
13080    }
13081    /// `VREDUCESS_MASKZ`.
13082    ///
13083    /// Supported operand variants:
13084    ///
13085    /// ```text
13086    /// +---+--------------------+
13087    /// | # | Operands           |
13088    /// +---+--------------------+
13089    /// | 1 | Xmm, Xmm, Mem, Imm |
13090    /// | 2 | Xmm, Xmm, Xmm, Imm |
13091    /// +---+--------------------+
13092    /// ```
13093    #[inline]
13094    pub fn vreducess_maskz<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13095    where
13096        Assembler<'a>: VreducessMaskzEmitter<A, B, C, D>,
13097    {
13098        <Self as VreducessMaskzEmitter<A, B, C, D>>::vreducess_maskz(self, op0, op1, op2, op3);
13099    }
13100    /// `VREDUCESS_MASKZ_SAE`.
13101    ///
13102    /// Supported operand variants:
13103    ///
13104    /// ```text
13105    /// +---+--------------------+
13106    /// | # | Operands           |
13107    /// +---+--------------------+
13108    /// | 1 | Xmm, Xmm, Xmm, Imm |
13109    /// +---+--------------------+
13110    /// ```
13111    #[inline]
13112    pub fn vreducess_maskz_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13113    where
13114        Assembler<'a>: VreducessMaskzSaeEmitter<A, B, C, D>,
13115    {
13116        <Self as VreducessMaskzSaeEmitter<A, B, C, D>>::vreducess_maskz_sae(
13117            self, op0, op1, op2, op3,
13118        );
13119    }
13120    /// `VREDUCESS_SAE`.
13121    ///
13122    /// Supported operand variants:
13123    ///
13124    /// ```text
13125    /// +---+--------------------+
13126    /// | # | Operands           |
13127    /// +---+--------------------+
13128    /// | 1 | Xmm, Xmm, Xmm, Imm |
13129    /// +---+--------------------+
13130    /// ```
13131    #[inline]
13132    pub fn vreducess_sae<A, B, C, D>(&mut self, op0: A, op1: B, op2: C, op3: D)
13133    where
13134        Assembler<'a>: VreducessSaeEmitter<A, B, C, D>,
13135    {
13136        <Self as VreducessSaeEmitter<A, B, C, D>>::vreducess_sae(self, op0, op1, op2, op3);
13137    }
13138    /// `VXORPD`.
13139    ///
13140    /// Supported operand variants:
13141    ///
13142    /// ```text
13143    /// +---+---------------+
13144    /// | # | Operands      |
13145    /// +---+---------------+
13146    /// | 1 | Xmm, Xmm, Mem |
13147    /// | 2 | Xmm, Xmm, Xmm |
13148    /// | 3 | Ymm, Ymm, Mem |
13149    /// | 4 | Ymm, Ymm, Ymm |
13150    /// | 5 | Zmm, Zmm, Mem |
13151    /// | 6 | Zmm, Zmm, Zmm |
13152    /// +---+---------------+
13153    /// ```
13154    #[inline]
13155    pub fn vxorpd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13156    where
13157        Assembler<'a>: VxorpdEmitter<A, B, C>,
13158    {
13159        <Self as VxorpdEmitter<A, B, C>>::vxorpd(self, op0, op1, op2);
13160    }
13161    /// `VXORPD_MASK`.
13162    ///
13163    /// Supported operand variants:
13164    ///
13165    /// ```text
13166    /// +---+---------------+
13167    /// | # | Operands      |
13168    /// +---+---------------+
13169    /// | 1 | Xmm, Xmm, Mem |
13170    /// | 2 | Xmm, Xmm, Xmm |
13171    /// | 3 | Ymm, Ymm, Mem |
13172    /// | 4 | Ymm, Ymm, Ymm |
13173    /// | 5 | Zmm, Zmm, Mem |
13174    /// | 6 | Zmm, Zmm, Zmm |
13175    /// +---+---------------+
13176    /// ```
13177    #[inline]
13178    pub fn vxorpd_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13179    where
13180        Assembler<'a>: VxorpdMaskEmitter<A, B, C>,
13181    {
13182        <Self as VxorpdMaskEmitter<A, B, C>>::vxorpd_mask(self, op0, op1, op2);
13183    }
13184    /// `VXORPD_MASKZ`.
13185    ///
13186    /// Supported operand variants:
13187    ///
13188    /// ```text
13189    /// +---+---------------+
13190    /// | # | Operands      |
13191    /// +---+---------------+
13192    /// | 1 | Xmm, Xmm, Mem |
13193    /// | 2 | Xmm, Xmm, Xmm |
13194    /// | 3 | Ymm, Ymm, Mem |
13195    /// | 4 | Ymm, Ymm, Ymm |
13196    /// | 5 | Zmm, Zmm, Mem |
13197    /// | 6 | Zmm, Zmm, Zmm |
13198    /// +---+---------------+
13199    /// ```
13200    #[inline]
13201    pub fn vxorpd_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13202    where
13203        Assembler<'a>: VxorpdMaskzEmitter<A, B, C>,
13204    {
13205        <Self as VxorpdMaskzEmitter<A, B, C>>::vxorpd_maskz(self, op0, op1, op2);
13206    }
13207    /// `VXORPS`.
13208    ///
13209    /// Supported operand variants:
13210    ///
13211    /// ```text
13212    /// +---+---------------+
13213    /// | # | Operands      |
13214    /// +---+---------------+
13215    /// | 1 | Xmm, Xmm, Mem |
13216    /// | 2 | Xmm, Xmm, Xmm |
13217    /// | 3 | Ymm, Ymm, Mem |
13218    /// | 4 | Ymm, Ymm, Ymm |
13219    /// | 5 | Zmm, Zmm, Mem |
13220    /// | 6 | Zmm, Zmm, Zmm |
13221    /// +---+---------------+
13222    /// ```
13223    #[inline]
13224    pub fn vxorps<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13225    where
13226        Assembler<'a>: VxorpsEmitter<A, B, C>,
13227    {
13228        <Self as VxorpsEmitter<A, B, C>>::vxorps(self, op0, op1, op2);
13229    }
13230    /// `VXORPS_MASK`.
13231    ///
13232    /// Supported operand variants:
13233    ///
13234    /// ```text
13235    /// +---+---------------+
13236    /// | # | Operands      |
13237    /// +---+---------------+
13238    /// | 1 | Xmm, Xmm, Mem |
13239    /// | 2 | Xmm, Xmm, Xmm |
13240    /// | 3 | Ymm, Ymm, Mem |
13241    /// | 4 | Ymm, Ymm, Ymm |
13242    /// | 5 | Zmm, Zmm, Mem |
13243    /// | 6 | Zmm, Zmm, Zmm |
13244    /// +---+---------------+
13245    /// ```
13246    #[inline]
13247    pub fn vxorps_mask<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13248    where
13249        Assembler<'a>: VxorpsMaskEmitter<A, B, C>,
13250    {
13251        <Self as VxorpsMaskEmitter<A, B, C>>::vxorps_mask(self, op0, op1, op2);
13252    }
13253    /// `VXORPS_MASKZ`.
13254    ///
13255    /// Supported operand variants:
13256    ///
13257    /// ```text
13258    /// +---+---------------+
13259    /// | # | Operands      |
13260    /// +---+---------------+
13261    /// | 1 | Xmm, Xmm, Mem |
13262    /// | 2 | Xmm, Xmm, Xmm |
13263    /// | 3 | Ymm, Ymm, Mem |
13264    /// | 4 | Ymm, Ymm, Ymm |
13265    /// | 5 | Zmm, Zmm, Mem |
13266    /// | 6 | Zmm, Zmm, Zmm |
13267    /// +---+---------------+
13268    /// ```
13269    #[inline]
13270    pub fn vxorps_maskz<A, B, C>(&mut self, op0: A, op1: B, op2: C)
13271    where
13272        Assembler<'a>: VxorpsMaskzEmitter<A, B, C>,
13273    {
13274        <Self as VxorpsMaskzEmitter<A, B, C>>::vxorps_maskz(self, op0, op1, op2);
13275    }
13276}