#![allow(dead_code, clippy::all)]
use crate::{
    core::{
        buffer::{
            CodeBuffer, CodeOffset, ConstantData, LabelUse, Reloc, RelocDistance, RelocTarget,
        },
        emitter::Emitter,
        operand::*,
    },
    AsmError,
};

use super::opcodes::ALT_TAB;
use super::{emitter::X86EmitterExplicit, operands::*};

pub struct Assembler<'a> {
    pub buffer: &'a mut CodeBuffer,
    flags: u64,
    extra_reg: Reg,
    last_error: Option<AsmError>,
}

const RC_RN: u64 = 0x0000000;
const RC_RD: u64 = 0x0800000;
const RC_RU: u64 = 0x1000000;
const RC_RZ: u64 = 0x1800000;
const SEG_MASK: u64 = 0xe0000000;
const ADDR32: u64 = 0x10000000;
const LONG: u64 = 0x100000000;

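// Flag bits OR'd into the 64-bit opcode descriptor that `emit` receives: mandatory
// prefixes (66/F2/F3), REX.W, LOCK, the VEX/EVEX length and payload fields, segment
// and address-size overrides, the EVEX mask/zeroing/broadcast bits and the disp8
// compression scale. `encode_opcode`, `encode_memory` and `opc_size` below interpret
// these bit positions.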
const OPC_66: u64 = 0x80000;
const OPC_F2: u64 = 0x100000;
const OPC_F3: u64 = 0x200000;
const OPC_REXW: u64 = 0x400000;
const OPC_LOCK: u64 = 0x800000;
const OPC_VEXL0: u64 = 0x1000000;
const OPC_VEXL1: u64 = 0x1800000;
const OPC_EVEXL0: u64 = 0x2000000;
const OPC_EVEXL1: u64 = 0x2800000;
const OPC_EVEXL2: u64 = 0x3000000;
const OPC_EVEXL3: u64 = 0x3800000;
const OPC_EVEXB: u64 = 0x4000000;
const OPC_VSIB: u64 = 0x8000000;
const OPC_67: u64 = ADDR32;
const OPC_SEG_MSK: u64 = 0xe0000000;
const OPC_JMPL: u64 = 0;
const OPC_MASK_MSK: u64 = 0xe00000000;
const OPC_EVEXZ: u64 = 0x1000000000;
const OPC_USER_MSK: u64 = OPC_67 | OPC_SEG_MSK | OPC_MASK_MSK;
const OPC_FORCE_SIB: u64 = 0x2000000000;
const OPC_DOWNGRADE_VEX: u64 = 0x4000000000;
const OPC_DOWNGRADE_VEX_FLIPW: u64 = 0x40000000000;
const OPC_EVEX_DISP8SCALE: u64 = 0x38000000000;
const OPC_GPH_OP0: u64 = 0x200000000000;
const OPC_GPH_OP1: u64 = 0x400000000000;
const EPFX_REX_MSK: u64 = 0x43f;
const EPFX_REX: u64 = 0x20;
const EPFX_EVEX: u64 = 0x40;
const EPFX_REXR: u64 = 0x10;
const EPFX_REXX: u64 = 0x08;
const EPFX_REXB: u64 = 0x04;
const EPFX_REXR4: u64 = 0x02;
const EPFX_REXB4: u64 = 0x01;
const EPFX_REXX4: u64 = 0x400;
const EPFX_VVVV_IDX: u64 = 11;

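/// Checks whether `imm` is an immediate operand that fits into an `immsz`-byte
/// immediate field; an `immsz` of 0 only accepts the value 0, and 8 accepts any
/// 64-bit value.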
fn op_imm_n(imm: Operand, immsz: usize) -> bool {
    if !imm.is_imm() {
        return false;
    }
    let imm = imm.as_::<Imm>();
    if immsz == 0 && imm.value() == 0 {
        return true;
    }

    if immsz == 1 && imm.is_int8() {
        return true;
    }
    if immsz == 2 && imm.is_int16() {
        return true;
    }
    if immsz == 3 && (imm.value() & 0xffffff) == imm.value() {
        return true;
    }
    if immsz == 4 && imm.is_int32() {
        return true;
    }
    if immsz == 8 {
        return true;
    }

    false
}

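/// Computes how many bytes the prefixes and opcode bytes selected by `opc` and
/// `epfx` will occupy, mirroring the emission logic of `Assembler::encode_opcode`.
/// `encode_memory` uses this for the 15-byte instruction-length check.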
fn opc_size(opc: u64, epfx: u64) -> usize {
    let mut res = 1;

    if (opc & OPC_EVEXL0) != 0 {
        res += 4;
    } else if (opc & OPC_VEXL0) != 0 {
        if (opc & (OPC_REXW | 0x20000)) != 0 || (epfx & (EPFX_REXX | EPFX_REXB)) != 0 {
            res += 3;
        } else {
            res += 2;
        }
    } else {
        if (opc & OPC_LOCK) != 0 {
            res += 1;
        }
        if (opc & OPC_66) != 0 {
            res += 1;
        }
        if (opc & (OPC_F2 | OPC_F3)) != 0 {
            res += 1;
        }
        if (opc & OPC_REXW) != 0 || (epfx & EPFX_REX_MSK) != 0 {
            res += 1;
        }
        if (opc & 0x30000) != 0 {
            res += 1;
        }
        if (opc & 0x20000) != 0 {
            res += 1;
        }
    }
    if (opc & OPC_SEG_MSK) != 0 {
        res += 1;
    }
    if (opc & OPC_67) != 0 {
        res += 1;
    }
    if (opc & 0x8000) != 0 {
        res += 1;
    }

    res
}

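/// Operand-encoding forms (where the ModRM.reg / ModRM.rm operands, the VEX.vvvv
/// register, implicit registers and immediates come from). The form is stored in
/// bits 51..56 of the opcode descriptor and selects an entry of `ENCODING_INFOS`.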
#[derive(Copy, Clone, PartialEq, Eq)]
enum Encoding {
    NP,
    M,
    R,
    M1,
    MI,
    MC,
    MR,
    RM,
    RMA,
    MRI,
    RMI,
    MRC,
    AM,
    MA,
    I,
    IA,
    O,
    OI,
    OA,
    S,
    A,
    D,
    FD,
    TD,
    RVM,
    RVMI,
    RVMR,
    RMV,
    VM,
    VMI,
    MVR,
    MRV,
    MAX,
}
#[repr(C)]
#[derive(Copy, Clone)]
struct EncodingInfo {
    modrm: u8,
    modreg: u8,
    vexreg: u8,
    immidx: u8,
    immctl: u8,
    zregidx: u8,
    zregval: u8,
}

impl EncodingInfo {
    pub const fn new() -> Self {
        Self {
            modreg: 0,
            modrm: 0,
            vexreg: 0,
            immctl: 0,
            immidx: 0,
            zregidx: 0,
            zregval: 0,
        }
    }
}

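// Per-form operand routing. The operand indices (`modrm`, `modreg`, `vexreg`,
// `zregidx`) are stored XOR'd with 3 so that 0 means "unused"; `emit` XORs them
// with 3 again before indexing its operand array. `zregval` pins that operand to
// a fixed register id, `immctl` selects how the immediate slot is interpreted
// (1: must be the constant 1, 2: absolute address as in the FD/TD forms,
// 3: register encoded in imm8, 4: plain immediate, 6: label/symbol displacement),
// and `immidx` is the operand index it is read from.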
const ENCODING_INFOS: [EncodingInfo; Encoding::MAX as usize] = {
    const NONE: EncodingInfo = EncodingInfo::new();
    let mut infos = [NONE; Encoding::MAX as usize];
    infos[Encoding::NP as usize] = NONE;
    infos[Encoding::M as usize] = EncodingInfo { modrm: 0x0 ^ 3, ..NONE };
    infos[Encoding::R as usize] = EncodingInfo { modreg: 0x0 ^ 3, ..NONE };
    infos[Encoding::M1 as usize] = EncodingInfo { modrm: 0x0 ^ 3, immctl: 1, immidx: 1, ..NONE };
    infos[Encoding::MI as usize] = EncodingInfo { modrm: 0x0 ^ 3, immctl: 4, immidx: 1, ..NONE };
    infos[Encoding::MC as usize] = EncodingInfo { modrm: 0x0 ^ 3, zregidx: 0x1 ^ 3, zregval: 1, ..NONE };
    infos[Encoding::MR as usize] = EncodingInfo { modrm: 0x0 ^ 3, modreg: 0x1 ^ 3, ..NONE };
    infos[Encoding::RM as usize] = EncodingInfo { modrm: 0x1 ^ 3, modreg: 0x0 ^ 3, ..NONE };
    infos[Encoding::RMA as usize] = EncodingInfo { modrm: 0x1 ^ 3, modreg: 0x0 ^ 3, zregidx: 0x2 ^ 3, ..NONE };
    infos[Encoding::MRI as usize] = EncodingInfo { modrm: 0x0 ^ 3, modreg: 0x1 ^ 3, immctl: 4, immidx: 2, ..NONE };
    infos[Encoding::RMI as usize] = EncodingInfo { modrm: 0x1 ^ 3, modreg: 0x0 ^ 3, immctl: 4, immidx: 2, ..NONE };
    infos[Encoding::MRC as usize] = EncodingInfo { modrm: 0x0 ^ 3, modreg: 0x1 ^ 3, zregidx: 0x2 ^ 3, zregval: 1, ..NONE };
    infos[Encoding::AM as usize] = EncodingInfo { modrm: 0x1 ^ 3, zregidx: 0x0 ^ 3, ..NONE };
    infos[Encoding::MA as usize] = EncodingInfo { modrm: 0x0 ^ 3, zregidx: 0x1 ^ 3, ..NONE };
    infos[Encoding::I as usize] = EncodingInfo { immctl: 4, immidx: 0, ..NONE };
    infos[Encoding::IA as usize] = EncodingInfo { immctl: 4, immidx: 1, zregidx: 0x0 ^ 3, ..NONE };
    infos[Encoding::O as usize] = EncodingInfo { modreg: 0x0 ^ 3, ..NONE };
    infos[Encoding::OI as usize] = EncodingInfo { modreg: 0x0 ^ 3, immctl: 4, immidx: 1, ..NONE };
    infos[Encoding::OA as usize] = EncodingInfo { modreg: 0x0 ^ 3, zregidx: 0x1 ^ 3, ..NONE };
    infos[Encoding::S as usize] = NONE;
    infos[Encoding::A as usize] = EncodingInfo { zregidx: 0x0 ^ 3, ..NONE };
    infos[Encoding::D as usize] = EncodingInfo { immctl: 6, immidx: 0, ..NONE };
    infos[Encoding::FD as usize] = EncodingInfo { immctl: 2, immidx: 1, zregidx: 0x0 ^ 3, ..NONE };
    infos[Encoding::TD as usize] = EncodingInfo { immctl: 2, immidx: 0, zregidx: 0x1 ^ 3, ..NONE };
    infos[Encoding::RVM as usize] = EncodingInfo { modrm: 0x2 ^ 3, modreg: 0x0 ^ 3, vexreg: 0x1 ^ 3, ..NONE };
    infos[Encoding::RVMI as usize] = EncodingInfo { modrm: 0x2 ^ 3, modreg: 0x0 ^ 3, vexreg: 0x1 ^ 3, immctl: 4, immidx: 3, ..NONE };
    infos[Encoding::RVMR as usize] = EncodingInfo { modrm: 0x2 ^ 3, modreg: 0x0 ^ 3, vexreg: 0x1 ^ 3, immctl: 3, immidx: 3, ..NONE };
    infos[Encoding::RMV as usize] = EncodingInfo { modrm: 0x1 ^ 3, modreg: 0x0 ^ 3, vexreg: 0x2 ^ 3, ..NONE };
    infos[Encoding::VM as usize] = EncodingInfo { modrm: 0x1 ^ 3, vexreg: 0x0 ^ 3, ..NONE };
    infos[Encoding::VMI as usize] = EncodingInfo { modrm: 0x1 ^ 3, vexreg: 0x0 ^ 3, immctl: 4, immidx: 2, ..NONE };
    infos[Encoding::MVR as usize] = EncodingInfo { modrm: 0x0 ^ 3, modreg: 0x2 ^ 3, vexreg: 0x1 ^ 3, ..NONE };
    infos[Encoding::MRV as usize] = EncodingInfo { modrm: 0x0 ^ 3, modreg: 0x1 ^ 3, vexreg: 0x2 ^ 3, ..NONE };

    infos
};

impl<'a> Assembler<'a> {
    pub fn new(buf: &'a mut CodeBuffer) -> Self {
        Self {
            buffer: buf,
            extra_reg: Reg::new(),
            flags: 0,
            last_error: None,
        }
    }

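    // Prefix and EVEX modifiers: each method sets bits in `self.flags`, which the
    // next `emit` call ORs into the opcode descriptor and then clears.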
    pub fn sae(&mut self) -> &mut Self {
        self.flags |= 0x4000000;
        self
    }

    pub fn rn_sae(&mut self) -> &mut Self {
        self.flags |= 0x4000000 | RC_RN;
        self
    }

    pub fn rd_sae(&mut self) -> &mut Self {
        self.flags |= 0x4000000 | RC_RD;
        self
    }

    pub fn ru_sae(&mut self) -> &mut Self {
        self.flags |= 0x4000000 | RC_RU;
        self
    }

    pub fn rz_sae(&mut self) -> &mut Self {
        self.flags |= 0x4000000 | RC_RZ;
        self
    }

    pub fn seg(&mut self, sreg: SReg) -> &mut Self {
        self.flags |= ((sreg.id() & 0x7) as u64) << 29;
        self
    }

    pub fn fs(&mut self) -> &mut Self {
        self.seg(FS)
    }

    pub fn gs(&mut self) -> &mut Self {
        self.seg(GS)
    }

    pub fn k(&mut self, k: KReg) -> &mut Self {
        self.flags |= ((k.id() & 0x7) as u64) << 33;
        self
    }

    pub fn rep(&mut self) -> &mut Self {
        self.flags |= 0x200000;
        self
    }

    pub fn repnz(&mut self) -> &mut Self {
        self.flags |= 0x100000;
        self
    }

    pub fn repz(&mut self) -> &mut Self {
        self.rep()
    }

    pub fn lock(&mut self) -> &mut Self {
        self.flags |= 0x800000;
        self
    }

    pub fn long(&mut self) -> &mut Self {
        self.flags |= LONG;
        self
    }

    pub fn get_label(&mut self) -> Label {
        self.buffer.get_label()
    }

    pub fn bind_label(&mut self, label: Label) {
        self.buffer.bind_label(label);
    }

    pub fn add_constant(&mut self, c: impl Into<ConstantData>) -> Label {
        let c = self.buffer.add_constant(c);
        self.buffer.get_label_for_constant(c)
    }

    pub fn label_offset(&self, label: Label) -> CodeOffset {
        self.buffer.label_offset(label)
    }

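    /// Emits segment and address-size overrides, the legacy prefixes and the REX,
    /// VEX or EVEX prefix selected by `opc`/`epfx`, followed by the opcode byte(s).
    /// Returns true if an error was recorded in `last_error`.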
    fn encode_opcode(&mut self, opc: u64, epfx: u64) -> bool {
        if opc & OPC_SEG_MSK != 0 {
            self.buffer
                .put1(((0x65643e362e2600u64 >> (8 * ((opc >> 29) & 7))) & 0xff) as u8);
        }

        if opc & OPC_67 != 0 {
            self.buffer.put1(0x67);
        }

        if opc & OPC_EVEXL0 != 0 {
            self.buffer.put1(0x62);
            let mut b1 = (opc >> 16 & 7) as u8;
            if (epfx & EPFX_REXR) == 0 {
                b1 |= 0x80;
            }
            if (epfx & EPFX_REXX) == 0 {
                b1 |= 0x40;
            }
            if (epfx & EPFX_REXB) == 0 {
                b1 |= 0x20;
            }
            if (epfx & EPFX_REXR4) == 0 {
                b1 |= 0x10;
            }
            if (epfx & EPFX_REXB4) != 0 {
                b1 |= 0x08;
            }
            self.buffer.put1(b1);

            let mut b2 = (opc >> 20 & 3) as u8;
            if (epfx & EPFX_REXX4) == 0 {
                b2 |= 0x04;
            }
            b2 |= ((!(epfx >> EPFX_VVVV_IDX) & 0xf) << 3) as u8;
            if opc & OPC_REXW != 0 {
                b2 |= 0x80;
            }
            self.buffer.put1(b2);

            let mut b3 = (opc >> 33 & 7) as u8;
            b3 |= ((!(epfx >> EPFX_VVVV_IDX) & 0x10) >> 1) as u8;
            if opc & OPC_EVEXB != 0 {
                b3 |= 0x10;
            }
            b3 |= ((opc >> 23 & 3) << 5) as u8;
            if opc & OPC_EVEXZ != 0 {
                b3 |= 0x80;
            }
            self.buffer.put1(b3);
        } else if opc & OPC_VEXL0 != 0 {
            if (epfx & (EPFX_REXR4 | EPFX_REXX4 | EPFX_REXB4 | (0x10 << EPFX_VVVV_IDX))) != 0 {
                self.last_error = Some(AsmError::InvalidPrefix);
                return true;
            }

            let vex3 = (opc & (OPC_REXW | 0x20000)) != 0 || (epfx & (EPFX_REXX | EPFX_REXB)) != 0;
            let pp = (opc >> 20 & 3) as u8;
            self.buffer.put1(0xc4 | !vex3 as u8);

            let mut b2 = pp | if (opc & 0x800000) != 0 { 0x4 } else { 0 };

            if vex3 {
                let mut b1 = (opc >> 16 & 3) as u8;
                if (epfx & EPFX_REXR) == 0 {
                    b1 |= 0x80;
                }
                if (epfx & EPFX_REXX) == 0 {
                    b1 |= 0x40;
                }
                if (epfx & EPFX_REXB) == 0 {
                    b1 |= 0x20;
                }
                self.buffer.put1(b1);

                if (opc & OPC_REXW) != 0 {
                    b2 |= 0x80;
                }
            } else if (epfx & EPFX_REXR) == 0 {
                b2 |= 0x80;
            }

            b2 |= ((!(epfx >> EPFX_VVVV_IDX) & 0xf) << 3) as u8;
            self.buffer.put1(b2);
        } else {
            if opc & OPC_LOCK != 0 {
                self.buffer.put1(0xF0);
            }
            if opc & OPC_66 != 0 {
                self.buffer.put1(0x66);
            }
            if opc & OPC_F2 != 0 {
                self.buffer.put1(0xF2);
            }
            if opc & OPC_F3 != 0 {
                self.buffer.put1(0xF3);
            }
            if opc & OPC_REXW != 0 || epfx & EPFX_REX_MSK != 0 {
                let mut rex = 0x40;
                if opc & OPC_REXW != 0 {
                    rex |= 0x08;
                }
                if epfx & EPFX_REXR != 0 {
                    rex |= 0x04;
                }
                if epfx & EPFX_REXX != 0 {
                    rex |= 0x02;
                }
                if epfx & EPFX_REXB != 0 {
                    rex |= 0x01;
                }
                self.buffer.put1(rex);
            }
            if opc & 0x30000 != 0 {
                self.buffer.put1(0x0F);
            }
            if opc & 0x30000 == 0x20000 {
                self.buffer.put1(0x38);
            }
            if opc & 0x30000 == 0x30000 {
                self.buffer.put1(0x3A);
            }
        }

        self.buffer.put1((opc & 0xff) as u8);
        if (opc & 0x8000) != 0 {
            self.buffer.put1(((opc >> 8) & 0xff) as u8);
        }
        false
    }

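    /// Emits `imm` as a little-endian immediate of `immsz` bytes, recording an
    /// error (and returning true) if the value does not fit.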
    fn encode_imm(&mut self, imm: Operand, immsz: usize) -> bool {
        if !op_imm_n(imm, immsz) {
            self.last_error = Some(AsmError::InvalidOperand);
            return true;
        }
        let imm = imm.as_::<Imm>().value() as u64;
        for i in 0..immsz {
            self.buffer.put1((imm >> (8 * i)) as u8);
        }

        false
    }

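    /// Encodes an instruction whose register operand lives in the low three bits
    /// of the opcode byte (the opcode+reg forms such as `push r64`), patching the
    /// register id into the last emitted byte.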
    fn encode_operand(&mut self, opc: u64, mut epfx: u64, op0: Operand) {
        if op0.id() & 0x8 != 0 {
            epfx |= EPFX_REXB;
        }

        let has_rex = opc & OPC_REXW != 0 || (epfx & EPFX_REX_MSK) != 0;
        if has_rex && op0.is_reg_type_of(RegType::Gp8Hi) {
            self.last_error = Some(AsmError::InvalidOperand);
            return;
        }

        let is_err = self.encode_opcode(opc, epfx);
        if is_err {
            return;
        }
        let ix = self.buffer.cur_offset() as usize - 1;
        let byte = self.buffer.data()[ix];

        self.buffer.data_mut()[ix] = (byte & 0xf8) | (op0.id() & 0x7) as u8;
    }

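    /// Encodes the ModRM (and, when needed, SIB) form: `op0` is the r/m operand
    /// (a register, or memory with register/RIP/label/symbol base), `op1` supplies
    /// the ModRM.reg field, and `immsz` is the size of a trailing immediate, used
    /// only for the 15-byte instruction-length check. Returns true on error.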
    fn encode_memory(
        &mut self,
        mut opc: u64,
        mut epfx: u64,
        op0: Operand,
        op1: Operand,
        immsz: usize,
    ) -> bool {
        if op0.is_reg() && op0.id() & 0x8 != 0 {
            epfx |= EPFX_REXB;
        }

        if op0.is_reg() && op0.id() & 0x10 != 0 {
            epfx |= EPFX_REXX | EPFX_EVEX;
        }

        if op0.is_mem() && op0.as_::<Mem>().base_id() & 0x8 != 0 {
            epfx |= EPFX_REXB;
        }

        if op0.is_mem() && op0.as_::<Mem>().base_id() & 0x10 != 0 {
            epfx |= EPFX_REXX | EPFX_EVEX;
        }

        if op0.is_mem() && op0.as_::<Mem>().index_id() & 0x8 != 0 {
            epfx |= EPFX_REXX;
        }

        if op0.is_mem() && op0.as_::<Mem>().index_id() & 0x10 != 0 {
            epfx |= if opc & OPC_VSIB != 0 {
                0x10 << EPFX_VVVV_IDX
            } else {
                EPFX_REXX4
            };
        }

        if op1.is_reg() && op1.id() & 0x8 != 0 {
            epfx |= EPFX_REXR;
        }

        if op1.is_reg() && op1.id() & 0x10 != 0 {
            epfx |= EPFX_REXR4;
        }

        let has_rex = opc & (OPC_REXW | OPC_VEXL0 | OPC_EVEXL0) != 0 || (epfx & EPFX_REX_MSK) != 0;
        if has_rex && (op0.is_reg_type_of(RegType::Gp8Hi) || op1.is_reg_type_of(RegType::Gp8Hi)) {
            self.last_error = Some(AsmError::InvalidOperand);
            return true;
        }

        if epfx & (EPFX_EVEX | EPFX_REXB4 | EPFX_REXX4 | EPFX_REXR4 | (0x10 << EPFX_VVVV_IDX)) != 0
        {
            if opc & OPC_EVEXL0 == 0 {
                self.last_error = Some(AsmError::InvalidPrefix);
                return true;
            }
        } else if opc & OPC_DOWNGRADE_VEX != 0 {
            opc &= !(OPC_EVEXL0 | OPC_EVEX_DISP8SCALE);
            opc |= OPC_VEXL0;
            if opc & OPC_DOWNGRADE_VEX_FLIPW != 0 {
                opc ^= OPC_REXW;
            }
        }

        if op0.is_reg() {
            if self.encode_opcode(opc, epfx) {
                return true;
            }

            self.buffer
                .put1(0xc0 | ((op1.id() & 7) << 3) as u8 | (op0.id() & 7) as u8);
            return false;
        }

        let opcsz = opc_size(opc, epfx);

        let mut mod_ = 0;
        let reg = op1.id() & 7;
        let mut rm;
        let mut scale = 0;
        let mut idx = 4;
        let mut base = 0;
        let mut off = op0.as_::<Mem>().offset();
        let mut withsib = opc & OPC_FORCE_SIB != 0;
        let mem = op0.as_::<Mem>();
        if mem.has_index() {
            if opc & OPC_VSIB != 0 {
                if mem.index_type() != RegType::X86Xmm {
                    self.last_error = Some(AsmError::InvalidOperand);
                    return true;
                }

                if opc & OPC_EVEXL0 != 0 && opc & OPC_MASK_MSK == 0 {
                    self.last_error = Some(AsmError::InvalidOperand);
                    return true;
                }
            } else {
                if !matches!(mem.index_type(), RegType::Gp32 | RegType::Gp64) {
                    self.last_error = Some(AsmError::InvalidOperand);
                    return true;
                }

                if mem.index_id() == 4 {
                    self.last_error = Some(AsmError::InvalidOperand);
                    return true;
                }
            }

            idx = mem.index_id() & 7;
            let scalabs = mem.shift();
            if (scalabs & (scalabs.wrapping_sub(1))) != 0 {
                self.last_error = Some(AsmError::InvalidOperand);
                return true;
            }
            scale = if scalabs & 0xa != 0 { 1 } else { 0 } | if scalabs & 0xc != 0 { 2 } else { 0 };
            withsib = true;
        }

        let mut dispsz = 0;
        let mut label_use = None;
        let mut reloc = None;
        if !mem.has_base() {
            base = 5;
            rm = 4;
            dispsz = 4;
        } else if mem.has_base_reg() && mem.base_reg().is_rip() {
            rm = 5;
            dispsz = 4;
            if withsib {
                self.last_error = Some(AsmError::InvalidOperand);
                return true;
            }
        } else if mem.has_base_label() {
            rm = 5;
            if withsib {
                self.last_error = Some(AsmError::InvalidOperand);
                return true;
            }
            dispsz = 4;
            label_use = Some((mem.base_id(), LabelUse::X86JmpRel32));
        } else if mem.has_base_sym() {
            rm = 5;
            if withsib {
                self.last_error = Some(AsmError::InvalidOperand);
                return true;
            }
            dispsz = 4;
            let sym = Sym::from_id(mem.base_id());
            let distance = self.buffer.symbol_distance(sym);

            if distance == RelocDistance::Near {
                reloc = Some((sym, Reloc::X86PCRel4));
            } else {
                reloc = Some((sym, Reloc::X86GOTPCRel4));
            }
        } else {
            if !matches!(mem.base_type(), RegType::Gp32 | RegType::Gp64) {
                self.last_error = Some(AsmError::InvalidOperand);
                return true;
            }

            rm = mem.base_id() & 0x7;

            if withsib || rm == 4 {
                base = rm;
                rm = 4;
            }

            if off != 0 {
                let disp8scale = (opc & OPC_EVEX_DISP8SCALE) >> 39;

                if (off & ((1 << disp8scale) - 1)) == 0
                    && op_imm_n(*imm(off >> disp8scale).as_operand(), 1)
                    && opc & LONG == 0
                {
                    mod_ = 0x40;
                    dispsz = 1;
                    off >>= disp8scale;
                } else {
                    mod_ = 0x80;
                    dispsz = 4;
                }
            } else if rm == 5 {
                mod_ = 0x40;
                dispsz = 1;
            }
        }

        if opcsz + 1 + (rm == 4) as usize + dispsz + immsz > 15 {
            self.last_error = Some(AsmError::InvalidOperand);
            return true;
        }

        if self.encode_opcode(opc, epfx) {
            return true;
        }
        self.buffer.put1(mod_ as u8 | (reg << 3) as u8 | rm as u8);
        if rm == 4 {
            self.buffer
                .put1((scale << 6) as u8 | (idx << 3) as u8 | base as u8);
        }
        let offset = self.buffer.cur_offset();
        if let Some((label, label_use)) = label_use {
            self.buffer
                .use_label_at_offset(offset, Label::from_id(label), label_use);
        }

        if let Some((sym, reloc)) = reloc {
            self.buffer
                .add_reloc_at_offset(offset, reloc, RelocTarget::Sym(sym), -4);
        }
        self.encode_imm(*imm(off).as_operand(), dispsz)
    }
}

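// `emit` routes an opcode descriptor plus up to four operands through the
// encoding form stored in bits 51..56, retrying alternative encodings via
// `ALT_TAB` (indexed by the descriptor's top byte) whenever an operand does not
// fit the current form.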
impl<'a> Emitter for Assembler<'a> {
    fn emit(&mut self, opcode: i64, op0: &Operand, op1: &Operand, op2: &Operand, op3: &Operand) {
        let mut opc = opcode as u64;
        opc |= self.flags;
        self.flags = 0;
        let ops = &[*op0, *op1, *op2, *op3];

        let mut epfx = 0;

        if opc & OPC_GPH_OP0 != 0 && op0.is_reg() && op0.id() >= Gp::SP {
            epfx |= EPFX_REX;
        } else if opc & OPC_GPH_OP0 == 0 && op0.is_reg_type_of(RegType::Gp8Hi) {
            self.last_error = Some(AsmError::InvalidOperand);
            return;
        }

        if opc & OPC_GPH_OP1 != 0 && op1.is_reg() && op1.id() >= Gp::SP {
            epfx |= EPFX_REX;
        } else if opc & OPC_GPH_OP1 == 0 && op1.is_reg_type_of(RegType::Gp8Hi) {
            self.last_error = Some(AsmError::InvalidOperand);
            return;
        }

        loop {
            macro_rules! next {
                () => {
                    let alt = opc >> 56;
                    if alt != 0 {
                        opc = ALT_TAB[alt as usize] as u64;
                        continue;
                    }
                };
                ($err: expr) => {
                    let alt = opc >> 56;
                    if alt != 0 {
                        opc = ALT_TAB[alt as usize] as u64;
                        continue;
                    } else {
                        self.last_error = Some($err);
                        return;
                    }
                };
            }
            let enc = (opc >> 51) & 0x1f;
            let ei = &ENCODING_INFOS[enc as usize];

            let mut imm = 0xcc;
            let mut immsz = (opc >> 47) & 0xf;

            let mut label_use = None;
            let mut reloc = None;
            if ei.zregidx != 0 && ops[ei.zregidx as usize ^ 3].id() != ei.zregval as u32 {
                next!();
            }

            if enc == Encoding::S as u64 {
                if ((op0.id() << 3) as u64 & 0x20) != (opc & 0x20) {
                    next!();
                }

                opc |= (op0.id() as u64) << 3;
            }

            if ei.immctl > 0 {
                let i = ops[ei.immidx as usize];
                imm = i.as_::<Imm>().value();

                if ei.immctl == 2 {
                    imm = i.as_::<Imm>().value() as i64;
                    immsz = if opc & OPC_67 != 0 { 4 } else { 8 };

                    if immsz == 4 {
                        imm = i.as_::<Imm>().value() as i32 as i64;
                    }
                }

                if ei.immctl == 3 {
                    if !i.is_reg_type_of(RegType::Vec128) {
                        self.last_error = Some(AsmError::InvalidOperand);
                        return;
                    }

                    imm = ((i.id() as u64) << 4) as i64;
                }

                if ei.immctl == 6 {
                    if i.is_label() {
                        if immsz == 1 && opc >> 56 != 0 {
                            next!();
                        } else if immsz == 1 {
                            self.last_error = Some(AsmError::InvalidInstruction);
                            return;
                        }
                        label_use = Some((i.id(), LabelUse::X86JmpRel32));
                    } else if i.is_sym() {
                        let sym = i.as_::<Sym>();
                        if immsz == 1 && opc >> 56 != 0 {
                            next!();
                        } else if immsz == 1 {
                            self.last_error = Some(AsmError::InvalidInstruction);
                            return;
                        }
                        let distance = self.buffer.symbol_distance(sym);
                        reloc = Some((
                            i.id(),
                            if distance == RelocDistance::Near {
                                Reloc::X86PCRel4
                            } else {
                                Reloc::X86GOTPCRel4
                            },
                        ));
                    }
                    if opc & LONG != 0 && opc >> 56 != 0 {
                        next!();
                    }
                }

                if ei.immctl == 1 && imm != 1 {
                    next!();
                }
                if opc & LONG != 0 && ei.immctl == 4 && opc >> 56 != 0 && immsz == 1 {
                    next!();
                }
                if ei.immctl >= 2
                    && !op_imm_n(*crate::core::operand::imm(imm).as_operand(), immsz as _)
                {
                    next!();
                }
            }

            if opc & 0xfffffff == 0x90 && ops[0].id() == 0 {
                next!();
            }

            if enc == Encoding::R as u64 {
                self.encode_memory(opc, epfx, Operand::new(), ops[0], immsz as _);
            } else if ei.modrm != 0 {
                let modreg = if ei.modreg != 0 {
                    ops[ei.modreg as usize ^ 3]
                } else {
                    *Gpd::from_id(((opc & 0xff00) >> 8) as u32).as_operand()
                };

                if ei.vexreg != 0 {
                    epfx |= (ops[ei.vexreg as usize ^ 3].id() as u64) << EPFX_VVVV_IDX;
                }

                if self.encode_memory(opc, epfx, ops[ei.modrm as usize ^ 3], modreg, immsz as _) {
                    next!(self.last_error.take().unwrap());
                }
            } else if ei.modreg != 0 {
                self.encode_operand(opc, epfx, ops[ei.modreg as usize ^ 3]);
            } else if self.encode_opcode(opc, epfx) {
                return;
            }

            if ei.immctl >= 2 {
                let offset = self.buffer.cur_offset();
                if let Some((sym, reloc)) = reloc {
                    self.buffer.add_reloc_at_offset(
                        offset,
                        reloc,
                        RelocTarget::Sym(Sym::from_id(sym)),
                        -4,
                    );
                }
                if let Some((label_id, label_use)) = label_use {
                    self.buffer
                        .use_label_at_offset(offset, Label::from_id(label_id), label_use);
                }
                self.encode_imm(*crate::core::operand::imm(imm).as_operand(), immsz as _);
            }
            self.flags = 0;
            break;
        }
    }
}

impl<'a> X86EmitterExplicit for Assembler<'a> {}