use crate::emitter::Emitter;
use crate::error::AotError;
use ckb_vm::decoder::build_decoder;
use ckb_vm::instructions::ast::Value;
use ckb_vm::instructions::{blank_instruction, execute_instruction, extract_opcode};
use ckb_vm::instructions::{
    execute, instruction_length, is_basic_block_end_instruction, is_slowpath_instruction,
    Instruction,
};
use ckb_vm::machine::asm::{ckb_vm_asm_labels, ckb_vm_x64_execute, AsmCoreMachine};
use ckb_vm::machine::{elf_adaptor, DefaultMachine, SupportMachine, VERSION0, VERSION1};
use ckb_vm::{
    Bytes, CoreMachine, DefaultCoreMachine, Error, FlatMemory, InstructionCycleFunc, Machine,
    Memory, Register, RISCV_MAX_MEMORY,
};
use ckb_vm_definitions::{
    asm::{
        calculate_slot, Trace, RET_CYCLES_OVERFLOW, RET_DECODE_TRACE, RET_DYNAMIC_JUMP, RET_EBREAK,
        RET_ECALL, RET_INVALID_PERMISSION, RET_MAX_CYCLES_EXCEEDED, RET_OUT_OF_BOUND, RET_SLOWPATH,
        TRACE_ITEM_LENGTH,
    },
    instructions::OP_CUSTOM_TRACE_END,
    ISA_MOP,
};
use memmap::{Mmap, MmapMut};
use scroll::Pread;
use std::collections::{HashMap, HashSet};
use std::rc::Rc;

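// Hard limits applied while gathering labels and compiling; they bound the
// amount of work a single guest program can demand from the AOT compiler.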
const MAXIMUM_INSTRUCTIONS_PER_BLOCK: usize = 1024;
const MAXIMUM_LABELS: usize = 65535;
const MAXIMUM_SECTIONS: usize = 1024;
const MAXIMUM_DUMMY_SECTIONS: usize = 64;

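// Flag bits packed into PC immediates handed to the emitter:
// ADDRESS_WRITE_ONLY_FLAG is ORed into the fall-through PC recorded at the top
// of each block, while ADDRESS_LABEL_FLAG marks an immediate whose upper bits
// carry the index of a known label (see `optimize_pc`). Only addresses below
// MAXIMUM_ENCODED_ADDRESS leave room for that encoding.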
const ADDRESS_WRITE_ONLY_FLAG: u64 = 0x8000_0000_0000_0000;
const ADDRESS_LABEL_FLAG: u64 = 0x4000_0000_0000_0000;
const MAXIMUM_ENCODED_ADDRESS: u64 = 0x8000_0000;

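/// A side effect recorded while symbolically executing a basic block; the
/// emitter lowers each recorded `Write` into native code.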
#[derive(Debug, Clone)]
pub enum Write {
    Memory {
        address: Value,
        size: u8,
        value: Value,
    },
    Register {
        index: usize,
        value: Value,
    },
    Pc {
        value: Value,
    },
    Ecall,
    Ebreak,
    Slowpath,
}

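/// Builds the symbolic register file: x0 is the constant zero, every other
/// register simply refers to its own runtime value.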
fn init_registers() -> [Value; 32] {
    [
        Value::Imm(0),
        Value::Register(1),
        Value::Register(2),
        Value::Register(3),
        Value::Register(4),
        Value::Register(5),
        Value::Register(6),
        Value::Register(7),
        Value::Register(8),
        Value::Register(9),
        Value::Register(10),
        Value::Register(11),
        Value::Register(12),
        Value::Register(13),
        Value::Register(14),
        Value::Register(15),
        Value::Register(16),
        Value::Register(17),
        Value::Register(18),
        Value::Register(19),
        Value::Register(20),
        Value::Register(21),
        Value::Register(22),
        Value::Register(23),
        Value::Register(24),
        Value::Register(25),
        Value::Register(26),
        Value::Register(27),
        Value::Register(28),
        Value::Register(29),
        Value::Register(30),
        Value::Register(31),
    ]
}

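/// First compilation pass: decodes every executable section without emitting
/// any code, collecting basic-block entry addresses ("labels") as well as
/// ranges of zero-filled padding ("dummy sections").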
struct LabelGatheringMachine {
    registers: [Value; 32],
    pc: Value,
    next_pc: Value,
    labels_to_test: Vec<u64>,
    isa: u8,
    version: u32,

    memory: FlatMemory<u64>,
    labels: HashSet<u64>,
    sections: Vec<(u64, u64)>,
    dummy_sections: HashMap<u64, u64>,
}

impl LabelGatheringMachine {
    pub fn load(program: &Bytes, isa: u8, version: u32) -> Result<Self, Error> {
        let section_headers: Vec<elf_adaptor::SectionHeader> = if version < VERSION1 {
            use goblin_v023::container::Ctx;
            use goblin_v023::elf::{Header, SectionHeader};

            let header = program.pread::<Header>(0)?;
            let container = header.container().map_err(|_e| Error::ElfBits)?;
            let endianness = header.endianness().map_err(|_e| Error::ElfBits)?;
            if <Self as CoreMachine>::REG::BITS != if container.is_big() { 64 } else { 32 } {
                return Err(Error::ElfBits);
            }
            let ctx = Ctx::new(container, endianness);
            SectionHeader::parse(
                program,
                header.e_shoff as usize,
                header.e_shnum as usize,
                ctx,
            )?
            .iter()
            .map(elf_adaptor::SectionHeader::from_v0)
            .collect()
        } else {
            use goblin_v040::container::Ctx;
            use goblin_v040::elf::{Header, SectionHeader};

            let header = program.pread::<Header>(0)?;
            let container = header.container().map_err(|_e| Error::ElfBits)?;
            let endianness = header.endianness().map_err(|_e| Error::ElfBits)?;
            if <Self as CoreMachine>::REG::BITS != if container.is_big() { 64 } else { 32 } {
                return Err(Error::ElfBits);
            }
            let ctx = Ctx::new(container, endianness);
            SectionHeader::parse(
                program,
                header.e_shoff as usize,
                header.e_shnum as usize,
                ctx,
            )?
            .iter()
            .map(elf_adaptor::SectionHeader::from_v1)
            .collect()
        };
        if section_headers.len() > MAXIMUM_SECTIONS {
            return Err(Error::External(
                AotError::LimitReachedMaximumSections.to_string(),
            ));
        }
        let mut sections: Vec<(u64, u64)> = section_headers
            .iter()
            .filter_map(|section_header| {
                if section_header.sh_flags & u64::from(elf_adaptor::SHF_EXECINSTR) != 0 {
                    Some((
                        section_header.sh_addr,
                        section_header.sh_addr.wrapping_add(section_header.sh_size),
                    ))
                } else {
                    None
                }
            })
            .rev()
            .collect();
        if sections.iter().any(|(s, e)| s >= e) {
            return Err(Error::External(AotError::SectionIsEmpty.to_string()));
        }
        sections.sort_by_key(|section| section.0);
        if sections.windows(2).any(|w| w[0].1 > w[1].0) {
            return Err(Error::External(AotError::SectionOverlaps.to_string()));
        }
        let mut inner = DefaultCoreMachine::new(isa, version, 0);
        inner.load_elf(program, false)?;

        Ok(Self {
            isa,
            version,
            registers: init_registers(),
            pc: Value::from_u64(0),
            next_pc: Value::from_u64(0),
            labels: HashSet::default(),
            labels_to_test: Vec::new(),
            memory: inner.take_memory(),
            sections,
            dummy_sections: HashMap::default(),
        })
    }

    fn read_pc(&self) -> Result<u64, Error> {
        match &self.pc {
            Value::Imm(pc) => Ok(*pc),
            _ => Err(Error::Unexpected(String::from("Unexpected value type"))),
        }
    }

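    /// Linearly decodes each executable section, recording the start of every
    /// basic block and every immediate jump target as a label. Runs of zero
    /// halfwords are skipped and remembered as dummy sections.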
    pub fn gather(&mut self) -> Result<(), Error> {
        let mut decoder = build_decoder::<u64>(self.isa(), self.version());
        for i in 0..self.sections.len() {
            let (section_start, section_end) = self.sections[i];
            self.pc = Value::from_u64(section_start);
            let mut start_of_basic_block = true;
            while self.read_pc()? < section_end {
                let pc = self.read_pc()?;
                match decoder.decode(&mut self.memory, pc) {
                    Ok(instruction) => {
                        if start_of_basic_block {
                            self.labels.insert(pc);
                        }
                        start_of_basic_block = is_basic_block_end_instruction(instruction);
                        let next_pc = pc + u64::from(instruction_length(instruction));
                        self.update_pc(Value::from_u64(next_pc));
                        execute(instruction, self)?;
                        for label in self.labels_to_test.drain(..) {
                            if label != next_pc && label < section_end && label >= section_start {
                                self.labels.insert(label);
                            }
                        }
                        if self.labels.len() > MAXIMUM_LABELS {
                            return Err(Error::External(
                                AotError::LimitReachedMaximumLabels.to_string(),
                            ));
                        }
                        self.pc = Value::from_u64(next_pc);
                    }
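                    // A zero instruction word cannot be decoded. Treat the run
                    // of zero halfwords starting here as padding (a "dummy
                    // section") and skip past it, provided it begins on a
                    // basic-block boundary.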
                    Err(Error::InvalidInstruction {
                        pc: _,
                        instruction: i,
                    }) if i == 0 => {
                        if !start_of_basic_block {
                            return Err(Error::External(
                                AotError::OutOfBoundDueToNotStartOfBasicBlock.to_string(),
                            ));
                        }
                        let mut dummy_end = pc + 2;
                        while dummy_end < section_end && self.memory.execute_load16(dummy_end)? == 0
                        {
                            dummy_end += 2;
                        }
                        self.dummy_sections.insert(pc, dummy_end);
                        if self.dummy_sections.len() > MAXIMUM_DUMMY_SECTIONS {
                            return Err(Error::External(
                                AotError::LimitReachedMaximumDummySections.to_string(),
                            ));
                        }
                        self.pc = Value::from_u64(dummy_end);
                    }
                    Err(e) => return Err(e),
                }
            }
            if !start_of_basic_block {
                return Err(Error::External(
                    AotError::OutOfBoundDueToNotStartOfBasicBlock.to_string(),
                ));
            }
            debug_assert!(!self.labels.contains(&section_end));
        }
        for (dummy_start, dummy_end) in &self.dummy_sections {
            self.labels
                .retain(|label| *label < *dummy_start || *label >= *dummy_end);
        }
        Ok(())
    }
}

impl CoreMachine for LabelGatheringMachine {
    type REG = Value;
    type MEM = Self;

    fn pc(&self) -> &Value {
        &self.pc
    }

    fn update_pc(&mut self, pc: Self::REG) {
        self.next_pc = pc;
    }

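    // Immediate jump targets (including both arms of a conditional branch) are
    // queued as label candidates; dynamically computed targets cannot be
    // resolved at compile time.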
    fn commit_pc(&mut self) {
        match self.next_pc.clone() {
            Value::Imm(pc) => self.labels_to_test.push(pc),
            Value::Cond(_, t, f) => {
                if let (Value::Imm(t), Value::Imm(f)) = (&*t, &*f) {
                    self.labels_to_test.push(*t);
                    self.labels_to_test.push(*f);
                }
            }
            _ => (),
        }
    }

    fn memory(&self) -> &Self {
        self
    }

    fn memory_mut(&mut self) -> &mut Self {
        self
    }

    fn registers(&self) -> &[Value] {
        &self.registers
    }

    fn set_register(&mut self, _idx: usize, _value: Value) {
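        // This pass only collects labels, so register writes are discarded.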
    }

    fn isa(&self) -> u8 {
        self.isa
    }

    fn version(&self) -> u32 {
        self.version
    }
}

impl Machine for LabelGatheringMachine {
    fn ecall(&mut self) -> Result<(), Error> {
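        // No-op: syscalls do not introduce new compile-time jump targets.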
        Ok(())
    }

    fn ebreak(&mut self) -> Result<(), Error> {
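        // No-op: breakpoints do not introduce new compile-time jump targets.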
        Ok(())
    }
}

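// Memory accesses are symbolic during label gathering: loads build AST nodes,
// stores are discarded, and page-level operations are never exercised.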
impl Memory for LabelGatheringMachine {
    type REG = Value;

    fn init_pages(
        &mut self,
        _addr: u64,
        _size: u64,
        _flags: u8,
        _source: Option<Bytes>,
        _offset_from_addr: u64,
    ) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn fetch_flag(&mut self, _page: u64) -> Result<u8, Error> {
        Err(Error::Unimplemented)
    }

    fn set_flag(&mut self, _page: u64, _flag: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn clear_flag(&mut self, _page: u64, _flag: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn store_byte(&mut self, _addr: u64, _size: u64, _value: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn store_bytes(&mut self, _addr: u64, _value: &[u8]) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn execute_load16(&mut self, _addr: u64) -> Result<u16, Error> {
        Err(Error::Unimplemented)
    }

    fn execute_load32(&mut self, _addr: u64) -> Result<u32, Error> {
        Err(Error::Unimplemented)
    }

    fn load8(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 1))
    }

    fn load16(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 2))
    }

    fn load32(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 4))
    }

    fn load64(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 8))
    }

    fn store8(&mut self, _addr: &Value, _value: &Value) -> Result<(), Error> {
        Ok(())
    }

    fn store16(&mut self, _addr: &Value, _value: &Value) -> Result<(), Error> {
        Ok(())
    }

    fn store32(&mut self, _addr: &Value, _value: &Value) -> Result<(), Error> {
        Ok(())
    }

    fn store64(&mut self, _addr: &Value, _value: &Value) -> Result<(), Error> {
        Ok(())
    }
}

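/// The result of AOT compilation: an executable memory mapping plus a map from
/// guest addresses of basic-block entry points to byte offsets inside `code`.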
pub struct AotCode {
    pub code: Mmap,
    pub labels: HashMap<u64, u32>,
}

impl AotCode {
    pub fn base_address(&self) -> u64 {
        self.code.as_ptr() as u64
    }
}

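/// Second compilation pass: symbolically executes each basic block, recording
/// its side effects as `Write`s, and feeds them to the `Emitter` to generate
/// native code.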
pub struct AotCompilingMachine {
    isa: u8,
    version: u32,
    registers: [Value; 32],
    pc: Value,
    next_pc: Value,
    emitter: Emitter,
    memory: FlatMemory<u64>,
    sections: Vec<(u64, u64)>,
    dummy_sections: HashMap<u64, u64>,
    addresses_to_labels: HashMap<u64, u32>,
    writes: Vec<Write>,
    next_pc_write: Option<Value>,
    instruction_cycle_func: Option<Box<InstructionCycleFunc>>,
}

impl AotCompilingMachine {
    pub fn load(
        program: &Bytes,
        instruction_cycle_func: Option<Box<InstructionCycleFunc>>,
        isa: u8,
        version: u32,
    ) -> Result<Self, Error> {
        let mut label_gathering_machine = LabelGatheringMachine::load(program, isa, version)?;
        label_gathering_machine.gather()?;

        let mut labels: Vec<u64> = label_gathering_machine.labels.iter().cloned().collect();
        labels.sort_unstable();
        let addresses_to_labels = labels
            .iter()
            .enumerate()
            .map(|(i, address)| (*address, i as u32))
            .collect();

        Ok(Self {
            isa,
            version,
            registers: init_registers(),
            pc: Value::from_u64(0),
            next_pc: Value::from_u64(0),
            emitter: Emitter::new(labels.len(), version)?,
            addresses_to_labels,
            memory: label_gathering_machine.memory,
            sections: label_gathering_machine.sections,
            dummy_sections: label_gathering_machine.dummy_sections,
            writes: vec![],
            next_pc_write: None,
            instruction_cycle_func,
        })
    }

    fn read_pc(&self) -> Result<u64, Error> {
        match &self.pc {
            Value::Imm(pc) => Ok(*pc),
            _ => Err(Error::Unexpected(String::from("Unexpected value type"))),
        }
    }

    fn take_and_clear_writes(&mut self) -> Vec<Write> {
        std::mem::take(&mut self.writes)
    }

    fn emit_block(&mut self, instructions: &[Instruction]) -> Result<(), Error> {
        let mut cycles = 0;
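        // Writes produced by all instructions except the last one are stashed
        // in `initial_writes` and emitted right after the block's PC
        // bookkeeping; the final instruction's writes (which may include the
        // branch target) are emitted last so they can be batched below.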
        let mut initial_writes = vec![];

        for instruction in instructions.iter() {
            cycles += self
                .instruction_cycle_func
                .as_ref()
                .map(|f| f(*instruction))
                .unwrap_or(0);
        }
        self.emitter.emit_add_cycles(cycles)?;

        for (i, instruction) in instructions.iter().enumerate() {
            if i == instructions.len() - 1 {
                initial_writes = self.take_and_clear_writes();
            }
            let pc = self.read_pc()?;
            let length = instruction_length(*instruction);
            if is_slowpath_instruction(*instruction) {
                self.writes.push(Write::Slowpath);
            } else {
                execute(*instruction, self)?;
            }
            self.pc = Value::from_u64(pc + u64::from(length));
        }
        let pc = self.read_pc()?;
        if pc >= RISCV_MAX_MEMORY as u64 {
            return Err(Error::MemOutOfBound);
        }
        self.emitter.emit(&Write::Pc {
            value: Value::Imm(pc | ADDRESS_WRITE_ONLY_FLAG),
        })?;
        for write in initial_writes {
            self.emitter.emit(&write)?;
        }
        let mut last_writes = self.take_and_clear_writes();
        if let Some(value) = self.next_pc_write.take() {
            last_writes.push(Write::Pc {
                value: self.optimize_pc_value(value)?,
            });
        }
        let all_normal_writes = last_writes
            .iter()
            .all(|write| matches!(write, Write::Register { .. } | Write::Pc { .. }));
        if self.version >= VERSION1 && last_writes.len() > 1 && all_normal_writes {
            self.emitter.emit_writes(&last_writes)?;
        } else {
            for write in last_writes {
                self.emitter.emit(&write)?;
            }
        }
        Ok(())
    }

    pub fn compile(&mut self) -> Result<AotCode, Error> {
        let mut decoder = build_decoder::<u64>(self.isa(), self.version());
        let mut instructions = [Instruction::default(); MAXIMUM_INSTRUCTIONS_PER_BLOCK];
        for i in 0..self.sections.len() {
            let (section_start, section_end) = self.sections[i];
            self.pc = Value::from_u64(section_start);
            loop {
                let pc = self.read_pc()?;
                if pc >= section_end {
                    break;
                }
                if let Some(dummy_end) = self.dummy_sections.get(&pc) {
                    self.pc = Value::from_u64(*dummy_end);
                    continue;
                }
                if let Some(label) = self.addresses_to_labels.get(&pc) {
                    self.emitter.emit_label(*label)?;
                }
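                // Collect at most MAXIMUM_INSTRUCTIONS_PER_BLOCK instructions,
                // stopping at a basic-block terminator or just before the next
                // known label, then emit them as one block.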
                let mut count = 0;
                let mut current_pc = pc;
                while count < MAXIMUM_INSTRUCTIONS_PER_BLOCK && current_pc < section_end {
                    let instruction = decoder.decode(&mut self.memory, current_pc)?;
                    instructions[count] = instruction;
                    count += 1;
                    current_pc += u64::from(instruction_length(instruction));
                    if is_basic_block_end_instruction(instruction)
                        || self.addresses_to_labels.contains_key(&current_pc)
                    {
                        break;
                    }
                }
                self.emit_block(&instructions[0..count])?;
            }
        }
        let encoded_size = self.emitter.link()?;
        let mut buffer_mut = MmapMut::map_anon(encoded_size)?;
        self.emitter.encode(&mut buffer_mut[..])?;
        let code = buffer_mut.make_exec()?;
        let mut labels = HashMap::default();
        for (address, label) in &self.addresses_to_labels {
            let offset = self.emitter.get_label_offset(*label)?;
            labels.insert(*address, offset);
        }
        Ok(AotCode { code, labels })
    }

    fn optimize_pc_value(&self, value: Value) -> Result<Value, Error> {
        match value {
            Value::Imm(v) => Ok(Value::Imm(self.optimize_pc(v)?)),
            Value::Cond(c, t, f) => Ok(match (&*t, &*f) {
                (Value::Imm(t), Value::Imm(f)) => Value::Cond(
                    c,
                    Rc::new(Value::Imm(self.optimize_pc(*t)?)),
                    Rc::new(Value::Imm(self.optimize_pc(*f)?)),
                ),
                _ => Value::Cond(c, t, f),
            }),
            _ => Ok(value),
        }
    }

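    /// When the target address is low enough and corresponds to a known label,
    /// packs the label index into the upper bits and sets ADDRESS_LABEL_FLAG so
    /// the generated code can jump straight to the compiled block.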
    fn optimize_pc(&self, pc: u64) -> Result<u64, Error> {
        if pc >= RISCV_MAX_MEMORY as u64 {
            return Err(Error::MemOutOfBound);
        }
        if pc < MAXIMUM_ENCODED_ADDRESS {
            if let Some(label) = self.addresses_to_labels.get(&pc) {
                return Ok(pc | (u64::from(*label) << 32) | ADDRESS_LABEL_FLAG);
            }
        }
        Ok(pc)
    }
}

impl CoreMachine for AotCompilingMachine {
    type REG = Value;
    type MEM = Self;

    fn pc(&self) -> &Value {
        &self.pc
    }

    fn update_pc(&mut self, pc: Self::REG) {
        self.next_pc = pc;
    }

    fn commit_pc(&mut self) {
        self.next_pc_write = Some(self.next_pc.clone())
    }

    fn memory(&self) -> &Self {
        self
    }

    fn memory_mut(&mut self) -> &mut Self {
        self
    }

    fn registers(&self) -> &[Value] {
        &self.registers
    }

    fn set_register(&mut self, index: usize, value: Value) {
        self.writes.push(Write::Register { index, value });
    }

    fn isa(&self) -> u8 {
        self.isa
    }

    fn version(&self) -> u32 {
        self.version
    }
}

impl Machine for AotCompilingMachine {
    fn ecall(&mut self) -> Result<(), Error> {
        self.writes.push(Write::Ecall);
        Ok(())
    }

    fn ebreak(&mut self) -> Result<(), Error> {
        self.writes.push(Write::Ebreak);
        Ok(())
    }
}

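// During compilation, loads build AST nodes while stores are recorded as
// `Write::Memory` so the emitter can generate the corresponding native stores;
// page-level operations are never exercised.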
impl Memory for AotCompilingMachine {
    type REG = Value;

    fn init_pages(
        &mut self,
        _addr: u64,
        _size: u64,
        _flags: u8,
        _source: Option<Bytes>,
        _offset_from_addr: u64,
    ) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn fetch_flag(&mut self, _page: u64) -> Result<u8, Error> {
        Err(Error::Unimplemented)
    }

    fn set_flag(&mut self, _page: u64, _flag: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn clear_flag(&mut self, _page: u64, _flag: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn store_byte(&mut self, _addr: u64, _size: u64, _value: u8) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn store_bytes(&mut self, _addr: u64, _value: &[u8]) -> Result<(), Error> {
        Err(Error::Unimplemented)
    }

    fn execute_load16(&mut self, _addr: u64) -> Result<u16, Error> {
        Err(Error::Unimplemented)
    }

    fn execute_load32(&mut self, _addr: u64) -> Result<u32, Error> {
        Err(Error::Unimplemented)
    }

    fn load8(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 1))
    }

    fn load16(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 2))
    }

    fn load32(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 4))
    }

    fn load64(&mut self, addr: &Value) -> Result<Value, Error> {
        Ok(Value::Load(Rc::new(addr.clone()), 8))
    }

    fn store8(&mut self, addr: &Value, value: &Value) -> Result<(), Error> {
        self.writes.push(Write::Memory {
            address: addr.clone(),
            size: 1,
            value: value.clone(),
        });
        Ok(())
    }

    fn store16(&mut self, addr: &Value, value: &Value) -> Result<(), Error> {
        self.writes.push(Write::Memory {
            address: addr.clone(),
            size: 2,
            value: value.clone(),
        });
        Ok(())
    }

    fn store32(&mut self, addr: &Value, value: &Value) -> Result<(), Error> {
        self.writes.push(Write::Memory {
            address: addr.clone(),
            size: 4,
            value: value.clone(),
        });
        Ok(())
    }

    fn store64(&mut self, addr: &Value, value: &Value) -> Result<(), Error> {
        self.writes.push(Write::Memory {
            address: addr.clone(),
            size: 8,
            value: value.clone(),
        });
        Ok(())
    }
}

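/// Runtime wrapper around the assembly-based machine: whenever the current PC
/// matches a compiled basic-block entry, execution jumps into the AOT code,
/// otherwise it falls back to the assembly interpreter.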
pub struct AotMachine<'a> {
    pub machine: DefaultMachine<Box<AsmCoreMachine>>,
    pub aot_code: Option<&'a AotCode>,
}

impl<'a> AotMachine<'a> {
    pub fn new(
        machine: DefaultMachine<Box<AsmCoreMachine>>,
        aot_code: Option<&'a AotCode>,
    ) -> Self {
        Self { machine, aot_code }
    }

    pub fn set_max_cycles(&mut self, cycles: u64) {
        self.machine.set_max_cycles(cycles)
    }

    pub fn load_program(&mut self, program: &Bytes, args: &[Bytes]) -> Result<u64, Error> {
        self.machine.load_program(program, args)
    }

    pub fn run(&mut self) -> Result<i8, Error> {
        if self.machine.isa() & ISA_MOP != 0 && self.machine.version() == VERSION0 {
            return Err(Error::InvalidVersion);
        }
        let mut decoder = build_decoder::<u64>(self.machine.isa(), self.machine.version());
        self.machine.set_running(true);
        while self.machine.running() {
            if self.machine.reset_signal() {
                decoder.reset_instructions_cache();
                self.aot_code = None;
            }
            let result = if let Some(aot_code) = &self.aot_code {
                if let Some(offset) = aot_code.labels.get(self.machine.pc()) {
                    let base_address = aot_code.base_address();
                    let offset_address = base_address + u64::from(*offset);
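                    // The compiled blob's entry routine lives at
                    // `base_address`; call it with the machine state and the
                    // absolute address of the target label inside the blob.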
                    let f = unsafe {
                        std::mem::transmute::<u64, fn(*mut AsmCoreMachine, u64) -> u8>(base_address)
                    };
                    f(&mut (**self.machine.inner_mut()), offset_address)
                } else {
                    unsafe { ckb_vm_x64_execute(&mut (**self.machine.inner_mut())) }
                }
            } else {
                unsafe { ckb_vm_x64_execute(&mut (**self.machine.inner_mut())) }
            };
            match result {
                RET_DECODE_TRACE => {
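                    // The interpreter requested a trace for the current PC:
                    // decode up to TRACE_ITEM_LENGTH instructions, record their
                    // cycles and asm label addresses, and install the resulting
                    // trace into its slot.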
                    let pc = *self.machine.pc();
                    let slot = calculate_slot(pc);
                    let mut trace = Trace::default();
                    let mut current_pc = pc;
                    let mut i = 0;
                    while i < TRACE_ITEM_LENGTH {
                        let instruction = decoder.decode(self.machine.memory_mut(), current_pc)?;
                        let end_instruction = is_basic_block_end_instruction(instruction);
                        current_pc += u64::from(instruction_length(instruction));
                        trace.instructions[i] = instruction;
                        trace.cycles += self.machine.instruction_cycle_func()(instruction);
                        let opcode = extract_opcode(instruction);
                        trace.thread[i] = unsafe {
                            u64::from(
                                *(ckb_vm_asm_labels as *const u32).offset(opcode as u8 as isize),
                            ) + (ckb_vm_asm_labels as *const u32 as u64)
                        };
                        i += 1;
                        if end_instruction {
                            break;
                        }
                    }
                    trace.instructions[i] = blank_instruction(OP_CUSTOM_TRACE_END);
                    trace.thread[i] = unsafe {
                        u64::from(
                            *(ckb_vm_asm_labels as *const u32).offset(OP_CUSTOM_TRACE_END as isize),
                        ) + (ckb_vm_asm_labels as *const u32 as u64)
                    };
                    trace.address = pc;
                    trace.length = (current_pc - pc) as u8;
                    self.machine.inner_mut().traces[slot] = trace;
                }
                RET_ECALL => self.machine.ecall()?,
                RET_EBREAK => self.machine.ebreak()?,
                RET_DYNAMIC_JUMP => (),
                RET_MAX_CYCLES_EXCEEDED => return Err(Error::CyclesExceeded),
                RET_CYCLES_OVERFLOW => return Err(Error::CyclesOverflow),
                RET_OUT_OF_BOUND => return Err(Error::MemOutOfBound),
                RET_INVALID_PERMISSION => return Err(Error::MemWriteOnExecutablePage),
                RET_SLOWPATH => {
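                    // Slow-path (e.g. MOP) instructions are executed by the
                    // Rust interpreter. The generated code has already advanced
                    // PC past the 4-byte instruction, so step back to re-decode
                    // it.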
                    let pc = *self.machine.pc() - 4;
                    let instruction = decoder.decode(self.machine.memory_mut(), pc)?;
                    execute_instruction(instruction, &mut self.machine)?;
                }
                _ => return Err(Error::Asm(result)),
            }
        }
        Ok(self.machine.exit_code())
    }
}