//! `CMPCCXADD` feature bindings (x86): emitter traits and `Assembler`
//! convenience methods for the `CMPccXADD` instruction family.
//! Path: asmkit/x86/features/CMPCCXADD.rs
1use super::super::opcodes::*;
2use crate::core::emitter::*;
3use crate::core::operand::*;
4use crate::x86::assembler::*;
5use crate::x86::operands::*;
6
/// A dummy operand that represents no register. Here just for simplicity:
/// `emit` takes a fixed number of operand slots, and every call in this
/// file passes `&NOREG` in the unused trailing slot.
const NOREG: Operand = Operand::new();
9
10/// `CMPBEXADD`.
11///
12/// Supported operand variants:
13///
14/// ```text
15/// +---+---------------+
16/// | # | Operands      |
17/// +---+---------------+
18/// | 1 | Mem, Gpd, Gpd |
19/// | 2 | Mem, Gpq, Gpq |
20/// +---+---------------+
21/// ```
22pub trait CmpbexaddEmitter<A, B, C> {
23    fn cmpbexadd(&mut self, op0: A, op1: B, op2: C);
24}
25
26impl<'a> CmpbexaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
27    fn cmpbexadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
28        self.emit(
29            CMPBEXADD32MRR,
30            op0.as_operand(),
31            op1.as_operand(),
32            op2.as_operand(),
33            &NOREG,
34        );
35    }
36}
37
38impl<'a> CmpbexaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
39    fn cmpbexadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
40        self.emit(
41            CMPBEXADD64MRR,
42            op0.as_operand(),
43            op1.as_operand(),
44            op2.as_operand(),
45            &NOREG,
46        );
47    }
48}
49
50/// `CMPBXADD`.
51///
52/// Supported operand variants:
53///
54/// ```text
55/// +---+---------------+
56/// | # | Operands      |
57/// +---+---------------+
58/// | 1 | Mem, Gpd, Gpd |
59/// | 2 | Mem, Gpq, Gpq |
60/// +---+---------------+
61/// ```
62pub trait CmpbxaddEmitter<A, B, C> {
63    fn cmpbxadd(&mut self, op0: A, op1: B, op2: C);
64}
65
66impl<'a> CmpbxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
67    fn cmpbxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
68        self.emit(
69            CMPBXADD32MRR,
70            op0.as_operand(),
71            op1.as_operand(),
72            op2.as_operand(),
73            &NOREG,
74        );
75    }
76}
77
78impl<'a> CmpbxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
79    fn cmpbxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
80        self.emit(
81            CMPBXADD64MRR,
82            op0.as_operand(),
83            op1.as_operand(),
84            op2.as_operand(),
85            &NOREG,
86        );
87    }
88}
89
90/// `CMPLEXADD`.
91///
92/// Supported operand variants:
93///
94/// ```text
95/// +---+---------------+
96/// | # | Operands      |
97/// +---+---------------+
98/// | 1 | Mem, Gpd, Gpd |
99/// | 2 | Mem, Gpq, Gpq |
100/// +---+---------------+
101/// ```
102pub trait CmplexaddEmitter<A, B, C> {
103    fn cmplexadd(&mut self, op0: A, op1: B, op2: C);
104}
105
106impl<'a> CmplexaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
107    fn cmplexadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
108        self.emit(
109            CMPLEXADD32MRR,
110            op0.as_operand(),
111            op1.as_operand(),
112            op2.as_operand(),
113            &NOREG,
114        );
115    }
116}
117
118impl<'a> CmplexaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
119    fn cmplexadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
120        self.emit(
121            CMPLEXADD64MRR,
122            op0.as_operand(),
123            op1.as_operand(),
124            op2.as_operand(),
125            &NOREG,
126        );
127    }
128}
129
130/// `CMPLXADD`.
131///
132/// Supported operand variants:
133///
134/// ```text
135/// +---+---------------+
136/// | # | Operands      |
137/// +---+---------------+
138/// | 1 | Mem, Gpd, Gpd |
139/// | 2 | Mem, Gpq, Gpq |
140/// +---+---------------+
141/// ```
142pub trait CmplxaddEmitter<A, B, C> {
143    fn cmplxadd(&mut self, op0: A, op1: B, op2: C);
144}
145
146impl<'a> CmplxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
147    fn cmplxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
148        self.emit(
149            CMPLXADD32MRR,
150            op0.as_operand(),
151            op1.as_operand(),
152            op2.as_operand(),
153            &NOREG,
154        );
155    }
156}
157
158impl<'a> CmplxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
159    fn cmplxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
160        self.emit(
161            CMPLXADD64MRR,
162            op0.as_operand(),
163            op1.as_operand(),
164            op2.as_operand(),
165            &NOREG,
166        );
167    }
168}
169
170/// `CMPNBEXADD`.
171///
172/// Supported operand variants:
173///
174/// ```text
175/// +---+---------------+
176/// | # | Operands      |
177/// +---+---------------+
178/// | 1 | Mem, Gpd, Gpd |
179/// | 2 | Mem, Gpq, Gpq |
180/// +---+---------------+
181/// ```
182pub trait CmpnbexaddEmitter<A, B, C> {
183    fn cmpnbexadd(&mut self, op0: A, op1: B, op2: C);
184}
185
186impl<'a> CmpnbexaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
187    fn cmpnbexadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
188        self.emit(
189            CMPNBEXADD32MRR,
190            op0.as_operand(),
191            op1.as_operand(),
192            op2.as_operand(),
193            &NOREG,
194        );
195    }
196}
197
198impl<'a> CmpnbexaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
199    fn cmpnbexadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
200        self.emit(
201            CMPNBEXADD64MRR,
202            op0.as_operand(),
203            op1.as_operand(),
204            op2.as_operand(),
205            &NOREG,
206        );
207    }
208}
209
210/// `CMPNBXADD`.
211///
212/// Supported operand variants:
213///
214/// ```text
215/// +---+---------------+
216/// | # | Operands      |
217/// +---+---------------+
218/// | 1 | Mem, Gpd, Gpd |
219/// | 2 | Mem, Gpq, Gpq |
220/// +---+---------------+
221/// ```
222pub trait CmpnbxaddEmitter<A, B, C> {
223    fn cmpnbxadd(&mut self, op0: A, op1: B, op2: C);
224}
225
226impl<'a> CmpnbxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
227    fn cmpnbxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
228        self.emit(
229            CMPNBXADD32MRR,
230            op0.as_operand(),
231            op1.as_operand(),
232            op2.as_operand(),
233            &NOREG,
234        );
235    }
236}
237
238impl<'a> CmpnbxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
239    fn cmpnbxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
240        self.emit(
241            CMPNBXADD64MRR,
242            op0.as_operand(),
243            op1.as_operand(),
244            op2.as_operand(),
245            &NOREG,
246        );
247    }
248}
249
250/// `CMPNLEXADD`.
251///
252/// Supported operand variants:
253///
254/// ```text
255/// +---+---------------+
256/// | # | Operands      |
257/// +---+---------------+
258/// | 1 | Mem, Gpd, Gpd |
259/// | 2 | Mem, Gpq, Gpq |
260/// +---+---------------+
261/// ```
262pub trait CmpnlexaddEmitter<A, B, C> {
263    fn cmpnlexadd(&mut self, op0: A, op1: B, op2: C);
264}
265
266impl<'a> CmpnlexaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
267    fn cmpnlexadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
268        self.emit(
269            CMPNLEXADD32MRR,
270            op0.as_operand(),
271            op1.as_operand(),
272            op2.as_operand(),
273            &NOREG,
274        );
275    }
276}
277
278impl<'a> CmpnlexaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
279    fn cmpnlexadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
280        self.emit(
281            CMPNLEXADD64MRR,
282            op0.as_operand(),
283            op1.as_operand(),
284            op2.as_operand(),
285            &NOREG,
286        );
287    }
288}
289
290/// `CMPNLXADD`.
291///
292/// Supported operand variants:
293///
294/// ```text
295/// +---+---------------+
296/// | # | Operands      |
297/// +---+---------------+
298/// | 1 | Mem, Gpd, Gpd |
299/// | 2 | Mem, Gpq, Gpq |
300/// +---+---------------+
301/// ```
302pub trait CmpnlxaddEmitter<A, B, C> {
303    fn cmpnlxadd(&mut self, op0: A, op1: B, op2: C);
304}
305
306impl<'a> CmpnlxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
307    fn cmpnlxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
308        self.emit(
309            CMPNLXADD32MRR,
310            op0.as_operand(),
311            op1.as_operand(),
312            op2.as_operand(),
313            &NOREG,
314        );
315    }
316}
317
318impl<'a> CmpnlxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
319    fn cmpnlxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
320        self.emit(
321            CMPNLXADD64MRR,
322            op0.as_operand(),
323            op1.as_operand(),
324            op2.as_operand(),
325            &NOREG,
326        );
327    }
328}
329
330/// `CMPNOXADD`.
331///
332/// Supported operand variants:
333///
334/// ```text
335/// +---+---------------+
336/// | # | Operands      |
337/// +---+---------------+
338/// | 1 | Mem, Gpd, Gpd |
339/// | 2 | Mem, Gpq, Gpq |
340/// +---+---------------+
341/// ```
342pub trait CmpnoxaddEmitter<A, B, C> {
343    fn cmpnoxadd(&mut self, op0: A, op1: B, op2: C);
344}
345
346impl<'a> CmpnoxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
347    fn cmpnoxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
348        self.emit(
349            CMPNOXADD32MRR,
350            op0.as_operand(),
351            op1.as_operand(),
352            op2.as_operand(),
353            &NOREG,
354        );
355    }
356}
357
358impl<'a> CmpnoxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
359    fn cmpnoxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
360        self.emit(
361            CMPNOXADD64MRR,
362            op0.as_operand(),
363            op1.as_operand(),
364            op2.as_operand(),
365            &NOREG,
366        );
367    }
368}
369
370/// `CMPNPXADD`.
371///
372/// Supported operand variants:
373///
374/// ```text
375/// +---+---------------+
376/// | # | Operands      |
377/// +---+---------------+
378/// | 1 | Mem, Gpd, Gpd |
379/// | 2 | Mem, Gpq, Gpq |
380/// +---+---------------+
381/// ```
382pub trait CmpnpxaddEmitter<A, B, C> {
383    fn cmpnpxadd(&mut self, op0: A, op1: B, op2: C);
384}
385
386impl<'a> CmpnpxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
387    fn cmpnpxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
388        self.emit(
389            CMPNPXADD32MRR,
390            op0.as_operand(),
391            op1.as_operand(),
392            op2.as_operand(),
393            &NOREG,
394        );
395    }
396}
397
398impl<'a> CmpnpxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
399    fn cmpnpxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
400        self.emit(
401            CMPNPXADD64MRR,
402            op0.as_operand(),
403            op1.as_operand(),
404            op2.as_operand(),
405            &NOREG,
406        );
407    }
408}
409
410/// `CMPNSXADD`.
411///
412/// Supported operand variants:
413///
414/// ```text
415/// +---+---------------+
416/// | # | Operands      |
417/// +---+---------------+
418/// | 1 | Mem, Gpd, Gpd |
419/// | 2 | Mem, Gpq, Gpq |
420/// +---+---------------+
421/// ```
422pub trait CmpnsxaddEmitter<A, B, C> {
423    fn cmpnsxadd(&mut self, op0: A, op1: B, op2: C);
424}
425
426impl<'a> CmpnsxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
427    fn cmpnsxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
428        self.emit(
429            CMPNSXADD32MRR,
430            op0.as_operand(),
431            op1.as_operand(),
432            op2.as_operand(),
433            &NOREG,
434        );
435    }
436}
437
438impl<'a> CmpnsxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
439    fn cmpnsxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
440        self.emit(
441            CMPNSXADD64MRR,
442            op0.as_operand(),
443            op1.as_operand(),
444            op2.as_operand(),
445            &NOREG,
446        );
447    }
448}
449
450/// `CMPNZXADD`.
451///
452/// Supported operand variants:
453///
454/// ```text
455/// +---+---------------+
456/// | # | Operands      |
457/// +---+---------------+
458/// | 1 | Mem, Gpd, Gpd |
459/// | 2 | Mem, Gpq, Gpq |
460/// +---+---------------+
461/// ```
462pub trait CmpnzxaddEmitter<A, B, C> {
463    fn cmpnzxadd(&mut self, op0: A, op1: B, op2: C);
464}
465
466impl<'a> CmpnzxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
467    fn cmpnzxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
468        self.emit(
469            CMPNZXADD32MRR,
470            op0.as_operand(),
471            op1.as_operand(),
472            op2.as_operand(),
473            &NOREG,
474        );
475    }
476}
477
478impl<'a> CmpnzxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
479    fn cmpnzxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
480        self.emit(
481            CMPNZXADD64MRR,
482            op0.as_operand(),
483            op1.as_operand(),
484            op2.as_operand(),
485            &NOREG,
486        );
487    }
488}
489
490/// `CMPOXADD`.
491///
492/// Supported operand variants:
493///
494/// ```text
495/// +---+---------------+
496/// | # | Operands      |
497/// +---+---------------+
498/// | 1 | Mem, Gpd, Gpd |
499/// | 2 | Mem, Gpq, Gpq |
500/// +---+---------------+
501/// ```
502pub trait CmpoxaddEmitter<A, B, C> {
503    fn cmpoxadd(&mut self, op0: A, op1: B, op2: C);
504}
505
506impl<'a> CmpoxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
507    fn cmpoxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
508        self.emit(
509            CMPOXADD32MRR,
510            op0.as_operand(),
511            op1.as_operand(),
512            op2.as_operand(),
513            &NOREG,
514        );
515    }
516}
517
518impl<'a> CmpoxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
519    fn cmpoxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
520        self.emit(
521            CMPOXADD64MRR,
522            op0.as_operand(),
523            op1.as_operand(),
524            op2.as_operand(),
525            &NOREG,
526        );
527    }
528}
529
530/// `CMPPXADD`.
531///
532/// Supported operand variants:
533///
534/// ```text
535/// +---+---------------+
536/// | # | Operands      |
537/// +---+---------------+
538/// | 1 | Mem, Gpd, Gpd |
539/// | 2 | Mem, Gpq, Gpq |
540/// +---+---------------+
541/// ```
542pub trait CmppxaddEmitter<A, B, C> {
543    fn cmppxadd(&mut self, op0: A, op1: B, op2: C);
544}
545
546impl<'a> CmppxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
547    fn cmppxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
548        self.emit(
549            CMPPXADD32MRR,
550            op0.as_operand(),
551            op1.as_operand(),
552            op2.as_operand(),
553            &NOREG,
554        );
555    }
556}
557
558impl<'a> CmppxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
559    fn cmppxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
560        self.emit(
561            CMPPXADD64MRR,
562            op0.as_operand(),
563            op1.as_operand(),
564            op2.as_operand(),
565            &NOREG,
566        );
567    }
568}
569
570/// `CMPSXADD`.
571///
572/// Supported operand variants:
573///
574/// ```text
575/// +---+---------------+
576/// | # | Operands      |
577/// +---+---------------+
578/// | 1 | Mem, Gpd, Gpd |
579/// | 2 | Mem, Gpq, Gpq |
580/// +---+---------------+
581/// ```
582pub trait CmpsxaddEmitter<A, B, C> {
583    fn cmpsxadd(&mut self, op0: A, op1: B, op2: C);
584}
585
586impl<'a> CmpsxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
587    fn cmpsxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
588        self.emit(
589            CMPSXADD32MRR,
590            op0.as_operand(),
591            op1.as_operand(),
592            op2.as_operand(),
593            &NOREG,
594        );
595    }
596}
597
598impl<'a> CmpsxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
599    fn cmpsxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
600        self.emit(
601            CMPSXADD64MRR,
602            op0.as_operand(),
603            op1.as_operand(),
604            op2.as_operand(),
605            &NOREG,
606        );
607    }
608}
609
610/// `CMPZXADD`.
611///
612/// Supported operand variants:
613///
614/// ```text
615/// +---+---------------+
616/// | # | Operands      |
617/// +---+---------------+
618/// | 1 | Mem, Gpd, Gpd |
619/// | 2 | Mem, Gpq, Gpq |
620/// +---+---------------+
621/// ```
622pub trait CmpzxaddEmitter<A, B, C> {
623    fn cmpzxadd(&mut self, op0: A, op1: B, op2: C);
624}
625
626impl<'a> CmpzxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
627    fn cmpzxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
628        self.emit(
629            CMPZXADD32MRR,
630            op0.as_operand(),
631            op1.as_operand(),
632            op2.as_operand(),
633            &NOREG,
634        );
635    }
636}
637
638impl<'a> CmpzxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
639    fn cmpzxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
640        self.emit(
641            CMPZXADD64MRR,
642            op0.as_operand(),
643            op1.as_operand(),
644            op2.as_operand(),
645            &NOREG,
646        );
647    }
648}
649
650/// `CMPCCXADD`.
651///
652/// Supported operand variants:
653///
654/// ```text
655/// +---+---------------+
656/// | # | Operands      |
657/// +---+---------------+
658/// | 1 | Mem, Gpd, Gpd |
659/// | 2 | Mem, Gpq, Gpq |
660/// +---+---------------+
661/// ```
662pub trait CmpccxaddEmitter<A, B, C> {
663    fn cmpccxadd(&mut self, op0: A, op1: B, op2: C);
664}
665
666impl<'a> CmpccxaddEmitter<Mem, Gpd, Gpd> for Assembler<'a> {
667    fn cmpccxadd(&mut self, op0: Mem, op1: Gpd, op2: Gpd) {
668        self.emit(
669            CMPCCXADD32MRR,
670            op0.as_operand(),
671            op1.as_operand(),
672            op2.as_operand(),
673            &NOREG,
674        );
675    }
676}
677
678impl<'a> CmpccxaddEmitter<Mem, Gpq, Gpq> for Assembler<'a> {
679    fn cmpccxadd(&mut self, op0: Mem, op1: Gpq, op2: Gpq) {
680        self.emit(
681            CMPCCXADD64MRR,
682            op0.as_operand(),
683            op1.as_operand(),
684            op2.as_operand(),
685            &NOREG,
686        );
687    }
688}
689
690impl<'a> Assembler<'a> {
691    /// `CMPBEXADD`.
692    ///
693    /// Supported operand variants:
694    ///
695    /// ```text
696    /// +---+---------------+
697    /// | # | Operands      |
698    /// +---+---------------+
699    /// | 1 | Mem, Gpd, Gpd |
700    /// | 2 | Mem, Gpq, Gpq |
701    /// +---+---------------+
702    /// ```
703    #[inline]
704    pub fn cmpbexadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
705    where
706        Assembler<'a>: CmpbexaddEmitter<A, B, C>,
707    {
708        <Self as CmpbexaddEmitter<A, B, C>>::cmpbexadd(self, op0, op1, op2);
709    }
710    /// `CMPBXADD`.
711    ///
712    /// Supported operand variants:
713    ///
714    /// ```text
715    /// +---+---------------+
716    /// | # | Operands      |
717    /// +---+---------------+
718    /// | 1 | Mem, Gpd, Gpd |
719    /// | 2 | Mem, Gpq, Gpq |
720    /// +---+---------------+
721    /// ```
722    #[inline]
723    pub fn cmpbxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
724    where
725        Assembler<'a>: CmpbxaddEmitter<A, B, C>,
726    {
727        <Self as CmpbxaddEmitter<A, B, C>>::cmpbxadd(self, op0, op1, op2);
728    }
729    /// `CMPLEXADD`.
730    ///
731    /// Supported operand variants:
732    ///
733    /// ```text
734    /// +---+---------------+
735    /// | # | Operands      |
736    /// +---+---------------+
737    /// | 1 | Mem, Gpd, Gpd |
738    /// | 2 | Mem, Gpq, Gpq |
739    /// +---+---------------+
740    /// ```
741    #[inline]
742    pub fn cmplexadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
743    where
744        Assembler<'a>: CmplexaddEmitter<A, B, C>,
745    {
746        <Self as CmplexaddEmitter<A, B, C>>::cmplexadd(self, op0, op1, op2);
747    }
748    /// `CMPLXADD`.
749    ///
750    /// Supported operand variants:
751    ///
752    /// ```text
753    /// +---+---------------+
754    /// | # | Operands      |
755    /// +---+---------------+
756    /// | 1 | Mem, Gpd, Gpd |
757    /// | 2 | Mem, Gpq, Gpq |
758    /// +---+---------------+
759    /// ```
760    #[inline]
761    pub fn cmplxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
762    where
763        Assembler<'a>: CmplxaddEmitter<A, B, C>,
764    {
765        <Self as CmplxaddEmitter<A, B, C>>::cmplxadd(self, op0, op1, op2);
766    }
767    /// `CMPNBEXADD`.
768    ///
769    /// Supported operand variants:
770    ///
771    /// ```text
772    /// +---+---------------+
773    /// | # | Operands      |
774    /// +---+---------------+
775    /// | 1 | Mem, Gpd, Gpd |
776    /// | 2 | Mem, Gpq, Gpq |
777    /// +---+---------------+
778    /// ```
779    #[inline]
780    pub fn cmpnbexadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
781    where
782        Assembler<'a>: CmpnbexaddEmitter<A, B, C>,
783    {
784        <Self as CmpnbexaddEmitter<A, B, C>>::cmpnbexadd(self, op0, op1, op2);
785    }
786    /// `CMPNBXADD`.
787    ///
788    /// Supported operand variants:
789    ///
790    /// ```text
791    /// +---+---------------+
792    /// | # | Operands      |
793    /// +---+---------------+
794    /// | 1 | Mem, Gpd, Gpd |
795    /// | 2 | Mem, Gpq, Gpq |
796    /// +---+---------------+
797    /// ```
798    #[inline]
799    pub fn cmpnbxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
800    where
801        Assembler<'a>: CmpnbxaddEmitter<A, B, C>,
802    {
803        <Self as CmpnbxaddEmitter<A, B, C>>::cmpnbxadd(self, op0, op1, op2);
804    }
805    /// `CMPNLEXADD`.
806    ///
807    /// Supported operand variants:
808    ///
809    /// ```text
810    /// +---+---------------+
811    /// | # | Operands      |
812    /// +---+---------------+
813    /// | 1 | Mem, Gpd, Gpd |
814    /// | 2 | Mem, Gpq, Gpq |
815    /// +---+---------------+
816    /// ```
817    #[inline]
818    pub fn cmpnlexadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
819    where
820        Assembler<'a>: CmpnlexaddEmitter<A, B, C>,
821    {
822        <Self as CmpnlexaddEmitter<A, B, C>>::cmpnlexadd(self, op0, op1, op2);
823    }
824    /// `CMPNLXADD`.
825    ///
826    /// Supported operand variants:
827    ///
828    /// ```text
829    /// +---+---------------+
830    /// | # | Operands      |
831    /// +---+---------------+
832    /// | 1 | Mem, Gpd, Gpd |
833    /// | 2 | Mem, Gpq, Gpq |
834    /// +---+---------------+
835    /// ```
836    #[inline]
837    pub fn cmpnlxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
838    where
839        Assembler<'a>: CmpnlxaddEmitter<A, B, C>,
840    {
841        <Self as CmpnlxaddEmitter<A, B, C>>::cmpnlxadd(self, op0, op1, op2);
842    }
843    /// `CMPNOXADD`.
844    ///
845    /// Supported operand variants:
846    ///
847    /// ```text
848    /// +---+---------------+
849    /// | # | Operands      |
850    /// +---+---------------+
851    /// | 1 | Mem, Gpd, Gpd |
852    /// | 2 | Mem, Gpq, Gpq |
853    /// +---+---------------+
854    /// ```
855    #[inline]
856    pub fn cmpnoxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
857    where
858        Assembler<'a>: CmpnoxaddEmitter<A, B, C>,
859    {
860        <Self as CmpnoxaddEmitter<A, B, C>>::cmpnoxadd(self, op0, op1, op2);
861    }
862    /// `CMPNPXADD`.
863    ///
864    /// Supported operand variants:
865    ///
866    /// ```text
867    /// +---+---------------+
868    /// | # | Operands      |
869    /// +---+---------------+
870    /// | 1 | Mem, Gpd, Gpd |
871    /// | 2 | Mem, Gpq, Gpq |
872    /// +---+---------------+
873    /// ```
874    #[inline]
875    pub fn cmpnpxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
876    where
877        Assembler<'a>: CmpnpxaddEmitter<A, B, C>,
878    {
879        <Self as CmpnpxaddEmitter<A, B, C>>::cmpnpxadd(self, op0, op1, op2);
880    }
881    /// `CMPNSXADD`.
882    ///
883    /// Supported operand variants:
884    ///
885    /// ```text
886    /// +---+---------------+
887    /// | # | Operands      |
888    /// +---+---------------+
889    /// | 1 | Mem, Gpd, Gpd |
890    /// | 2 | Mem, Gpq, Gpq |
891    /// +---+---------------+
892    /// ```
893    #[inline]
894    pub fn cmpnsxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
895    where
896        Assembler<'a>: CmpnsxaddEmitter<A, B, C>,
897    {
898        <Self as CmpnsxaddEmitter<A, B, C>>::cmpnsxadd(self, op0, op1, op2);
899    }
900    /// `CMPNZXADD`.
901    ///
902    /// Supported operand variants:
903    ///
904    /// ```text
905    /// +---+---------------+
906    /// | # | Operands      |
907    /// +---+---------------+
908    /// | 1 | Mem, Gpd, Gpd |
909    /// | 2 | Mem, Gpq, Gpq |
910    /// +---+---------------+
911    /// ```
912    #[inline]
913    pub fn cmpnzxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
914    where
915        Assembler<'a>: CmpnzxaddEmitter<A, B, C>,
916    {
917        <Self as CmpnzxaddEmitter<A, B, C>>::cmpnzxadd(self, op0, op1, op2);
918    }
919    /// `CMPOXADD`.
920    ///
921    /// Supported operand variants:
922    ///
923    /// ```text
924    /// +---+---------------+
925    /// | # | Operands      |
926    /// +---+---------------+
927    /// | 1 | Mem, Gpd, Gpd |
928    /// | 2 | Mem, Gpq, Gpq |
929    /// +---+---------------+
930    /// ```
931    #[inline]
932    pub fn cmpoxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
933    where
934        Assembler<'a>: CmpoxaddEmitter<A, B, C>,
935    {
936        <Self as CmpoxaddEmitter<A, B, C>>::cmpoxadd(self, op0, op1, op2);
937    }
938    /// `CMPPXADD`.
939    ///
940    /// Supported operand variants:
941    ///
942    /// ```text
943    /// +---+---------------+
944    /// | # | Operands      |
945    /// +---+---------------+
946    /// | 1 | Mem, Gpd, Gpd |
947    /// | 2 | Mem, Gpq, Gpq |
948    /// +---+---------------+
949    /// ```
950    #[inline]
951    pub fn cmppxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
952    where
953        Assembler<'a>: CmppxaddEmitter<A, B, C>,
954    {
955        <Self as CmppxaddEmitter<A, B, C>>::cmppxadd(self, op0, op1, op2);
956    }
957    /// `CMPSXADD`.
958    ///
959    /// Supported operand variants:
960    ///
961    /// ```text
962    /// +---+---------------+
963    /// | # | Operands      |
964    /// +---+---------------+
965    /// | 1 | Mem, Gpd, Gpd |
966    /// | 2 | Mem, Gpq, Gpq |
967    /// +---+---------------+
968    /// ```
969    #[inline]
970    pub fn cmpsxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
971    where
972        Assembler<'a>: CmpsxaddEmitter<A, B, C>,
973    {
974        <Self as CmpsxaddEmitter<A, B, C>>::cmpsxadd(self, op0, op1, op2);
975    }
976    /// `CMPZXADD`.
977    ///
978    /// Supported operand variants:
979    ///
980    /// ```text
981    /// +---+---------------+
982    /// | # | Operands      |
983    /// +---+---------------+
984    /// | 1 | Mem, Gpd, Gpd |
985    /// | 2 | Mem, Gpq, Gpq |
986    /// +---+---------------+
987    /// ```
988    #[inline]
989    pub fn cmpzxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
990    where
991        Assembler<'a>: CmpzxaddEmitter<A, B, C>,
992    {
993        <Self as CmpzxaddEmitter<A, B, C>>::cmpzxadd(self, op0, op1, op2);
994    }
995    /// `CMPCCXADD`.
996    ///
997    /// Supported operand variants:
998    ///
999    /// ```text
1000    /// +---+---------------+
1001    /// | # | Operands      |
1002    /// +---+---------------+
1003    /// | 1 | Mem, Gpd, Gpd |
1004    /// | 2 | Mem, Gpq, Gpq |
1005    /// +---+---------------+
1006    /// ```
1007    #[inline]
1008    pub fn cmpccxadd<A, B, C>(&mut self, op0: A, op1: B, op2: C)
1009    where
1010        Assembler<'a>: CmpccxaddEmitter<A, B, C>,
1011    {
1012        <Self as CmpccxaddEmitter<A, B, C>>::cmpccxadd(self, op0, op1, op2);
1013    }
1014}