// ckb_debugger/syscall_elf_dumper.rs
use byteorder::{ByteOrder, LittleEndian};
use ckb_vm::{
    Error, Memory, RISCV_PAGES, RISCV_PAGESIZE, Register, SupportMachine, Syscalls,
    bytes::{BufMut, Bytes, BytesMut},
    memory::{FLAG_EXECUTABLE, FLAG_WXORX_BIT},
    registers::A7,
};

/// Syscall handler that snapshots the running machine (memory, registers
/// and pc) into a standalone ELF executable on disk when the guest issues
/// a designated syscall number.
pub struct ElfDumper {
    /// Path of the ELF file to write; "dump.bin" by default.
    dump_file_name: String,
    /// Value of register A7 that triggers the dump; 4097 by default.
    syscall_number: u64,
    /// Largest run of zero bytes that is folded into the preceding memory
    /// segment instead of starting a new one (gaps that stay within the
    /// same page are always folded).
    maximum_zero_gap: u64,
}
14
15impl Default for ElfDumper {
16 fn default() -> ElfDumper {
17 ElfDumper { dump_file_name: "dump.bin".to_string(), syscall_number: 4097, maximum_zero_gap: 64 }
18 }
19}
20
21impl ElfDumper {
22 pub fn new(dump_file_name: String, syscall_number: u64, maximum_zero_gap: u64) -> Self {
23 ElfDumper { dump_file_name, syscall_number, maximum_zero_gap }
24 }
25}
26
/// A contiguous run of captured guest memory, destined to become one
/// PT_LOAD segment in the dumped ELF.
#[derive(Clone)]
struct Segment {
    /// Guest address of the first byte of `data`.
    start: u64,
    /// The captured bytes (may include folded-in runs of zeros).
    data: Bytes,
    /// Whether the pages this data came from were flagged executable.
    executable: bool,
}
33
34impl Segment {
35 fn first_page(&self) -> u64 {
36 self.start / RISCV_PAGESIZE as u64
37 }
38
39 fn first_page_address(&self) -> u64 {
40 self.first_page() * RISCV_PAGESIZE as u64
41 }
42
43 fn last_page(&self) -> u64 {
44 (self.start + self.data.len() as u64 - 1) / RISCV_PAGESIZE as u64
45 }
46}
47
48impl<Mac: SupportMachine> Syscalls<Mac> for ElfDumper {
49 fn initialize(&mut self, _machine: &mut Mac) -> Result<(), Error> {
50 Ok(())
51 }
52
53 fn ecall(&mut self, machine: &mut Mac) -> Result<bool, Error> {
54 if machine.registers()[A7].to_u64() != self.syscall_number {
55 return Ok(false);
56 }
57 let mut segments: Vec<Segment> = vec![];
58 let mut page = 0;
59 while page < RISCV_PAGES as u64 {
61 let mut start = page * RISCV_PAGESIZE as u64;
62 let end = (page + 1) * RISCV_PAGESIZE as u64;
63
64 while start < end {
65 while start < end {
67 if machine.memory_mut().load64(&Mac::REG::from_u64(start))?.to_u64() != 0 {
68 break;
69 }
70 start += 8;
71 }
72
73 if start < end {
74 let executable = machine.memory_mut().fetch_flag(page)? & FLAG_WXORX_BIT == FLAG_EXECUTABLE;
76 let (bytes_start, mut bytes_mut) = if segments.is_empty() {
77 (start, BytesMut::new())
78 } else {
79 let last_segment = &segments[segments.len() - 1];
80 let same_page = page == last_segment.last_page();
81 let gap = start - (last_segment.start + last_segment.data.len() as u64);
82 if last_segment.executable == executable && ((gap <= self.maximum_zero_gap) || same_page) {
83 let Segment { start: segment_start, data: segment_data, .. } =
84 segments.remove(segments.len() - 1);
85 let mut segment_data = BytesMut::from(segment_data.as_ref());
86 let mut zeros = vec![];
88 zeros.resize(gap as usize, 0);
89 segment_data.extend_from_slice(&zeros);
90 (segment_start, segment_data)
91 } else {
92 (start, BytesMut::new())
93 }
94 };
95
96 while start < end {
98 let value = machine.memory_mut().load64(&Mac::REG::from_u64(start))?.to_u64();
99 if value == 0 {
100 break;
101 }
102
103 bytes_mut.put_u64_le(value);
104 start += 8;
105 }
106
107 segments.push(Segment { start: bytes_start, data: bytes_mut.freeze(), executable });
108 }
109 }
110 page += 1;
111 }
112 if segments.is_empty() || segments[0].start <= RISCV_PAGESIZE as u64 {
115 return Err(Error::Unexpected("Unexpected segments".into()));
116 }
117
118 let mut register_buffer = BytesMut::new();
120 for register_value in &machine.registers()[1..] {
121 register_buffer.put_u64_le(register_value.to_u64());
122 }
123 let register_entrypoint = register_buffer.len() as u64;
124 register_buffer.put_u32_le(0x00000517); register_buffer.put_u32_le(0xf0050513); register_buffer.put_u32_le(0x00853083); register_buffer.put_u32_le(0x01053103); register_buffer.put_u32_le(0x01853183); register_buffer.put_u32_le(0x02053203); register_buffer.put_u32_le(0x02853283); register_buffer.put_u32_le(0x03053303); register_buffer.put_u32_le(0x03853383); register_buffer.put_u32_le(0x04053403); register_buffer.put_u32_le(0x04853483); register_buffer.put_u32_le(0x05853583); register_buffer.put_u32_le(0x06053603); register_buffer.put_u32_le(0x06853683); register_buffer.put_u32_le(0x07053703); register_buffer.put_u32_le(0x07853783); register_buffer.put_u32_le(0x08053803); register_buffer.put_u32_le(0x08853883); register_buffer.put_u32_le(0x09053903); register_buffer.put_u32_le(0x09853983); register_buffer.put_u32_le(0x0a053a03); register_buffer.put_u32_le(0x0a853a83); register_buffer.put_u32_le(0x0b053b03); register_buffer.put_u32_le(0x0b853b83); register_buffer.put_u32_le(0x0c053c03); register_buffer.put_u32_le(0x0c853c83); register_buffer.put_u32_le(0x0d053d03); register_buffer.put_u32_le(0x0d853d83); register_buffer.put_u32_le(0x0e053e03); register_buffer.put_u32_le(0x0e853e83); register_buffer.put_u32_le(0x0f053f03); register_buffer.put_u32_le(0x0f853f83); register_buffer.put_u32_le(0x05053503); let register_buffer_start = segments[0].first_page_address() - RISCV_PAGESIZE as u64;
159 let jump_instruction_pc = register_buffer_start + register_buffer.len() as u64;
160 let jump_offset = machine.pc().to_u64() - jump_instruction_pc;
161 let masked = jump_offset & 0xFFFFFFFFFFE00001;
162 if masked != 0 && masked != 0xFFFFFFFFFFE00000 {
163 return Err(Error::Unexpected("Unexpected masked".into()));
164 }
165 let jump_instruction = 0b1101111
166 | ((((jump_offset >> 12) & 0b_1111_1111) as u32) << 12)
167 | ((((jump_offset >> 11) & 1) as u32) << 20)
168 | ((((jump_offset >> 1) & 0b_1111_1111_11) as u32) << 21)
169 | ((((jump_offset >> 20) & 1) as u32) << 31);
170 register_buffer.put_u32_le(jump_instruction);
171 assert!(register_buffer.len() < RISCV_PAGESIZE);
172
173 segments.push(Segment { start: register_buffer_start, data: register_buffer.freeze(), executable: true });
174
175 let mut elf = BytesMut::new();
177 elf.extend_from_slice(&[
179 0x7f, 0x45, 0x4c, 0x46, 0x02, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
180 ]);
181 elf.put_u16_le(2);
183 elf.put_u16_le(243);
185 elf.put_u32_le(1);
187 elf.put_u64_le(register_buffer_start + register_entrypoint);
189 let program_header_offset = elf.len();
190 elf.put_u64_le(0);
192 let section_header_offset = elf.len();
193 elf.put_u64_le(0);
195 elf.put_u32_le(1);
197 elf.put_u16_le(64);
199 elf.put_u16_le(56);
201 let program_header_number_offset = elf.len();
202 elf.put_u16_le(0);
204 elf.put_u16_le(64);
206 let section_header_number_offset = elf.len();
207 elf.put_u16_le(0);
209 elf.put_u16_le(0);
211 assert!(elf.len() == 64);
212
213 let string_table_offset = elf.len() as u64;
214 elf.put_u32_le(0);
215
216 let mut section_headers = vec![];
217 let mut string_table_section_header = BytesMut::new();
218 string_table_section_header.put_u32_le(0);
220 string_table_section_header.put_u32_le(3);
222 string_table_section_header.put_u64_le(0);
224 string_table_section_header.put_u64_le(0);
226 string_table_section_header.put_u64_le(string_table_offset);
228 string_table_section_header.put_u64_le(4);
230 string_table_section_header.put_u32_le(0);
232 string_table_section_header.put_u32_le(0);
234 string_table_section_header.put_u64_le(1);
236 string_table_section_header.put_u64_le(0);
238 assert!(string_table_section_header.len() == 64);
239 section_headers.push(string_table_section_header.freeze());
240
241 let mut program_headers = vec![];
242
243 for segment in segments {
244 let current_offset = elf.len() as u64;
245 elf.extend_from_slice(segment.data.as_ref());
246
247 let mut program_header = BytesMut::new();
248 program_header.put_u32_le(1);
250 program_header.put_u32_le(if segment.executable { 5 } else { 6 });
252 program_header.put_u64_le(current_offset);
254 program_header.put_u64_le(segment.start);
256 program_header.put_u64_le(segment.start);
258 program_header.put_u64_le(segment.data.len() as u64);
260 program_header.put_u64_le(segment.data.len() as u64);
262 program_header.put_u64_le(0x1000);
264 assert!(program_header.len() == 56);
265 program_headers.push(program_header.freeze());
266
267 if segment.executable {
271 let mut section_header = BytesMut::new();
272 section_header.put_u32_le(0);
274 section_header.put_u32_le(1);
276 section_header.put_u64_le(6);
278 section_header.put_u64_le(segment.start);
280 section_header.put_u64_le(current_offset);
282 section_header.put_u64_le(segment.data.len() as u64);
284 section_header.put_u32_le(0);
286 section_header.put_u32_le(0);
288 section_header.put_u64_le(2);
290 section_header.put_u64_le(0);
292 assert!(section_header.len() == 64);
293 section_headers.push(section_header.freeze());
294 }
295 }
296
297 while elf.len() % 4 != 0 {
298 elf.put_u8(0);
299 }
300 let current_offset = elf.len() as u64;
301 LittleEndian::write_u64(&mut elf[program_header_offset..program_header_offset + 8], current_offset);
302 LittleEndian::write_u16(
303 &mut elf[program_header_number_offset..program_header_number_offset + 8],
304 program_headers.len() as u16,
305 );
306 for program_header in program_headers {
307 elf.extend_from_slice(program_header.as_ref());
308 }
309
310 while elf.len() % 4 != 0 {
311 elf.put_u8(0);
312 }
313 let current_offset = elf.len() as u64;
314 LittleEndian::write_u64(&mut elf[section_header_offset..section_header_offset + 8], current_offset);
315 LittleEndian::write_u16(
316 &mut elf[section_header_number_offset..section_header_number_offset + 8],
317 section_headers.len() as u16,
318 );
319 for section_header in section_headers {
320 elf.extend_from_slice(section_header.as_ref());
321 }
322 crate::arch::file_write(&self.dump_file_name, &elf)?;
323 Ok(true)
324 }
325}