use crate::ckb_constants::Source;
use crate::error::SysError;
use crate::high_level::find_cell_by_data_hash;
use crate::syscalls::{load_cell_code, load_cell_data_raw};
use core::cmp::{max, min};
use core::marker::PhantomData;
use core::mem::{size_of, zeroed};

#[repr(C)]
#[derive(Default)]
struct Elf64Ehdr {
    e_ident: [u8; 16],
    e_type: u16,
    e_machine: u16,
    e_version: u32,
    e_entry: u64,
    e_phoff: u64,
    e_shoff: u64,
    e_flags: u32,
    e_ehsize: u16,
    e_phentsize: u16,
    e_phnum: u16,
    e_shentsize: u16,
    e_shnum: u16,
    e_shstrndx: u16,
}

const SHT_STRTAB: usize = 3;
const SHT_RELA: usize = 4;
const SHT_DYNSYM: usize = 11;

#[repr(C)]
#[derive(Default)]
struct Elf64Shdr {
    sh_name: u32,
    sh_type: u32,
    sh_flags: u64,
    sh_addr: u64,
    sh_offset: u64,
    sh_size: u64,
    sh_link: u32,
    sh_info: u32,
    sh_addralign: u64,
    sh_entsize: u64,
}

const PT_LOAD: usize = 1;
const PF_X: usize = 1;

#[repr(C)]
#[derive(Default)]
struct Elf64Phdr {
    p_type: u32,
    p_flags: u32,
    p_offset: u64,
    p_vaddr: u64,
    p_paddr: u64,
    p_filesz: u64,
    p_memsz: u64,
    p_align: u64,
}

#[repr(C)]
struct Elf64Sym {
    st_name: u32,
    st_info: u8,
    st_other: u8,
    st_shndx: u16,
    st_value: u64,
    st_size: u64,
}

const R_RISCV_RELATIVE: usize = 3;

#[repr(C)]
#[derive(Default)]
struct Elf64Rela {
    r_offset: u64,
    r_info: u64,
    r_addend: i64,
}

const RISCV_PGSIZE_SHIFT: usize = 12;
const RISCV_PGSIZE: usize = 1 << RISCV_PGSIZE_SHIFT;

fn roundup_shift(a: usize, shift_n: usize) -> usize {
    (((a - 1) >> shift_n) + 1) << shift_n
}
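// A quick worked example of the page rounding performed above, assuming the
// 4 KiB pages implied by RISCV_PGSIZE_SHIFT:
//   roundup_shift(1, 12)    == 4096
//   roundup_shift(4096, 12) == 4096
//   roundup_shift(4097, 12) == 8192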

/// Errors raised while dynamically loading a library from a cell dep.
#[derive(Debug, Eq, PartialEq)]
pub enum Error {
    /// The context buffer is unusable: a `Library` does not fit in one page,
    /// or the requested size is smaller than one page.
    ContextFailure,
    /// The cell data is not an ELF image this loader can handle.
    InvalidElf,
    /// The context buffer is too small to hold the library's segments.
    MemoryNotEnough,
    /// No cell dep with the requested data hash was found.
    CellNotFound,
    /// The requested size is not a multiple of the page size (4 KiB).
    InvalidAlign,
    /// An underlying syscall failed.
    Sys(SysError),
}

impl From<SysError> for Error {
    fn from(error: SysError) -> Error {
        Error::Sys(error)
    }
}

/// A symbol resolved from a loaded [`Library`]; dereference it to obtain the
/// underlying value, usually an `extern "C"` function pointer.
pub struct Symbol<T> {
    ptr: *const u8,
    phantom: PhantomData<T>,
}

impl<T> Symbol<T> {
    fn new(ptr: *const u8) -> Self {
        Symbol {
            ptr,
            phantom: PhantomData,
        }
    }
}

impl<T> core::ops::Deref for Symbol<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // Reinterpret the stored symbol address as a reference to `T`.
        unsafe { core::mem::transmute(&self.ptr) }
    }
}

/// A dynamically loaded library living inside a [`CKBDLContext`] buffer.
pub struct Library {
    dynsyms: *const Elf64Sym,
    dynstr: *const u8,
    dynsym_size: usize,
    base_addr: *const u8,
    consumed_size: usize,
}

impl Library {
    fn new() -> Self {
        Library {
            dynsyms: core::ptr::null(),
            dynstr: core::ptr::null(),
            dynsym_size: 0,
            base_addr: core::ptr::null(),
            consumed_size: 0,
        }
    }

    /// The number of bytes of the context buffer consumed by this library.
    pub fn consumed_size(&self) -> usize {
        self.consumed_size
    }

    /// Look up an exported symbol by name.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `S` matches the actual type of the
    /// symbol; the returned [`Symbol`] simply reinterprets the symbol's
    /// address as `S` when dereferenced.
    pub unsafe fn get<S>(&self, symbol: &[u8]) -> Option<Symbol<S>> {
        unsafe {
            // Compare a NUL-terminated string in the loaded image with a byte slice.
            unsafe fn cmp_raw_str(ptr: *const u8, s: &[u8]) -> bool {
                unsafe {
                    let mut i = 0;
                    for c in s {
                        let sym_c = *ptr.add(i);
                        if sym_c == 0 {
                            return false;
                        }
                        if &sym_c != c {
                            return false;
                        }
                        i += 1;
                    }
                    let term_c = *ptr.add(i);
                    term_c == 0
                }
            }

            // Scan the dynamic symbol table for a name that matches `symbol`.
            for i in 0..self.dynsym_size {
                let sym = self.dynsyms.add(i);
                let str_ptr = self.dynstr.add((*sym).st_name as usize);
                if cmp_raw_str(str_ptr, symbol) {
                    let sym_ptr = self.base_addr.add((*sym).st_value as usize);
                    return Some(Symbol::new(sym_ptr));
                }
            }

            None
        }
    }
}
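// A minimal lookup sketch, assuming `lib` is a `Library` returned by
// `CKBDLContext::load` below; the symbol name and signature are hypothetical,
// purely for illustration:
//
//     type Plus42 = unsafe extern "C" fn(n: u64) -> u64;
//     let plus_42: Symbol<Plus42> = unsafe { lib.get(b"plus_42") }.expect("symbol");
//     let answer = unsafe { (*plus_42)(1) };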

/// A page-aligned block of memory used as the load target for dynamically
/// loaded libraries; `T` is typically a large byte array such as
/// `[u8; 64 * 1024]`.
#[repr(C)]
#[repr(align(4096))]
pub struct CKBDLContext<T>(T);

#[deprecated(
    since = "0.7.3",
    note = "Please use the dynamic_loading_c_impl instead"
)]
impl<T> CKBDLContext<T> {
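    /// Create a new, zero-initialized context.
    ///
    /// # Safety
    ///
    /// The value is produced by `core::mem::zeroed`, so `T` must be a type
    /// (such as a plain byte array) for which the all-zero bit pattern is valid.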
    pub unsafe fn new() -> Self {
        unsafe { zeroed() }
    }

    /// Load a shared library from the cell dep whose data hash is
    /// `dep_cell_data_hash`, using the whole context buffer.
    pub fn load<'a>(&'a mut self, dep_cell_data_hash: &[u8]) -> Result<Library, Error> {
        self.load_with_offset(dep_cell_data_hash, 0, size_of::<CKBDLContext<T>>())
    }
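    // A minimal sketch of loading two libraries into one context, assuming
    // `code_hash1` and `code_hash2` hold the data hashes of two cell deps
    // (hypothetical names, for illustration only):
    //
    //     let mut context = unsafe { CKBDLContext::<[u8; 64 * 1024]>::new() };
    //     let mut offset = 0;
    //     let mut size = 64 * 1024;
    //     let lib1 = context.load_with_offset(&code_hash1, offset, size).expect("load");
    //     offset += lib1.consumed_size();
    //     size -= lib1.consumed_size();
    //     let lib2 = context.load_with_offset(&code_hash2, offset, size).expect("load");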

    /// Load a shared library from a cell dep into this context's buffer,
    /// starting at `offset` and using at most `size` bytes of it. `size` must
    /// be a multiple of the 4 KiB page size.
    pub fn load_with_offset<'a>(
        &'a mut self,
        dep_cell_data_hash: &[u8],
        offset: usize,
        size: usize,
    ) -> Result<Library, Error> {
        if size_of::<Library>() > RISCV_PGSIZE || size < RISCV_PGSIZE {
            return Err(Error::ContextFailure);
        }

        if ((size >> RISCV_PGSIZE_SHIFT) << RISCV_PGSIZE_SHIFT) != size {
            return Err(Error::InvalidAlign);
        }

        unsafe {
            let aligned_size = size;
            let aligned_addr = (&mut self.0 as *mut T).cast::<u8>().add(offset);
            let mut library = Library::new();
            library.base_addr = aligned_addr;

            let index = find_cell_by_data_hash(dep_cell_data_hash, Source::CellDep)?
                .ok_or(Error::CellNotFound)?;

            // Load and sanity-check the ELF header.
            let mut hdr = Elf64Ehdr::default();
            let len = size_of::<Elf64Ehdr>();
            let loaded_len = {
                let elf_hdr_ptr = &mut hdr as *mut Elf64Ehdr;
                match load_cell_data_raw(elf_hdr_ptr.cast(), len, 0, index, Source::CellDep) {
                    Ok(len) => len,
                    Err(SysError::LengthNotEnough(_)) => len,
                    Err(err) => return Err(err.into()),
                }
            };
            if loaded_len < len {
                return Err(Error::InvalidElf);
            }
            if (hdr.e_phentsize as usize != size_of::<Elf64Phdr>())
                || (hdr.e_shentsize as usize != size_of::<Elf64Shdr>())
                || (hdr.e_phnum > 16)
                || (hdr.e_shnum > 32)
            {
                return Err(Error::InvalidElf);
            }

            // Load the program headers.
            let mut program_hdrs: [Elf64Phdr; 16] = Default::default();
            let len = size_of::<Elf64Phdr>() * hdr.e_phnum as usize;
            let loaded_len = {
                let ptr = program_hdrs.as_mut_ptr();
                match load_cell_data_raw(
                    ptr.cast(),
                    len,
                    hdr.e_phoff as usize,
                    index,
                    Source::CellDep,
                ) {
                    Ok(len) => len,
                    Err(SysError::LengthNotEnough(_)) => len,
                    Err(err) => return Err(err.into()),
                }
            };
            if loaded_len < len {
                return Err(Error::InvalidElf);
            }
            // Copy every PT_LOAD segment into the context buffer: executable
            // segments go through load_cell_code, the others through
            // load_cell_data_raw.
            let mut max_consumed_size = 0;
            for ph in &program_hdrs[0..hdr.e_phnum as usize] {
                if ph.p_type as usize == PT_LOAD && ph.p_memsz > 0 {
                    if (ph.p_flags as usize & PF_X) != 0 {
                        let prepad = ph.p_vaddr as usize % RISCV_PGSIZE;
                        let vaddr = ph.p_vaddr as usize - prepad;
                        let memsz = roundup_shift(prepad + ph.p_memsz as usize, RISCV_PGSIZE_SHIFT);
                        let size = vaddr + memsz;
                        if size > aligned_size {
                            return Err(Error::MemoryNotEnough);
                        }
                        load_cell_code(
                            aligned_addr.add(vaddr),
                            memsz,
                            ph.p_offset as usize,
                            ph.p_filesz as usize,
                            index,
                            Source::CellDep,
                        )?;
                        max_consumed_size = max(max_consumed_size, vaddr + memsz);
                    } else {
                        let filesz = ph.p_filesz as usize;
                        let size = ph.p_vaddr as usize + filesz;
                        let consumed_end: usize = roundup_shift(size, RISCV_PGSIZE_SHIFT);
                        if consumed_end > aligned_size {
                            return Err(Error::MemoryNotEnough);
                        }
                        let loaded_len = match load_cell_data_raw(
                            aligned_addr.add(ph.p_vaddr as usize),
                            filesz,
                            ph.p_offset as usize,
                            index,
                            Source::CellDep,
                        ) {
                            Ok(len) => len,
                            Err(SysError::LengthNotEnough(_)) => filesz,
                            Err(err) => return Err(err.into()),
                        };
                        if loaded_len < filesz {
                            return Err(Error::InvalidElf);
                        }
                        max_consumed_size = max(max_consumed_size, consumed_end);
                    }
                }
            }

            // Load the section headers.
            let mut section_hdrs: [Elf64Shdr; 32] = Default::default();
            let len = size_of::<Elf64Shdr>() * hdr.e_shnum as usize;
            let loaded_len = {
                let ptr = section_hdrs.as_mut_ptr();
                match load_cell_data_raw(
                    ptr.cast(),
                    len,
                    hdr.e_shoff as usize,
                    index,
                    Source::CellDep,
                ) {
                    Ok(len) => len,
                    Err(SysError::LengthNotEnough(_)) => len,
                    Err(err) => return Err(err.into()),
                }
            };
            if loaded_len < len {
                return Err(Error::InvalidElf);
            }

            // Load the section header string table, used below to recognize ".dynstr".
            let shshrtab = &section_hdrs[hdr.e_shstrndx as usize];
            let mut shrtab = [0u8; 4096];
            if shshrtab.sh_size > 4096 {
                return Err(Error::InvalidElf);
            }
            let shrtab_len = shshrtab.sh_size as usize;
            let loaded_len = {
                let ptr = shrtab.as_mut_ptr();
                match load_cell_data_raw(
                    ptr.cast(),
                    shrtab_len,
                    shshrtab.sh_offset as usize,
                    index,
                    Source::CellDep,
                ) {
                    Ok(len) => len,
                    Err(SysError::LengthNotEnough(_)) => len,
                    Err(err) => return Err(err.into()),
                }
            };
            if loaded_len < shrtab_len {
                return Err(Error::InvalidElf);
            }
            // Walk the section headers: apply RELA relocations and record the
            // dynamic symbol table and ".dynstr" string table for symbol lookup.
            for sh in &section_hdrs[0..hdr.e_shnum as usize] {
                if sh.sh_type as usize == SHT_RELA {
                    if sh.sh_entsize as usize != size_of::<Elf64Rela>() {
                        return Err(Error::InvalidElf);
                    }
                    let mut relocation_size = (sh.sh_size / sh.sh_entsize) as usize;
                    let mut current_offset = sh.sh_offset as usize;
                    while relocation_size > 0 {
                        let mut relocations: [Elf64Rela; 64] = zeroed();
                        let load_size = min(relocation_size, 64) as usize;
                        let load_length = load_size * size_of::<Elf64Rela>();
                        let loaded_len = {
                            let ptr = relocations.as_mut_ptr();
                            match load_cell_data_raw(
                                ptr.cast(),
                                load_length,
                                current_offset,
                                index,
                                Source::CellDep,
                            ) {
                                Ok(len) => len,
                                Err(SysError::LengthNotEnough(_)) => load_length,
                                Err(err) => return Err(err.into()),
                            }
                        };
                        if loaded_len < load_length {
                            return Err(Error::InvalidElf);
                        }
                        relocation_size -= load_size;
                        current_offset += load_length;
                        for r in &relocations[0..load_size] {
                            if r.r_info as usize != R_RISCV_RELATIVE {
                                // Only R_RISCV_RELATIVE relocations are supported.
                                return Err(Error::InvalidElf);
                            }
                            aligned_addr
                                .add(r.r_offset as usize)
                                .cast::<u64>()
                                .write_unaligned(
                                    aligned_addr.offset(r.r_addend as isize) as usize as u64
                                );
                        }
                    }
                } else if sh.sh_type as usize == SHT_DYNSYM {
                    if sh.sh_entsize as usize != size_of::<Elf64Sym>() {
                        return Err(Error::InvalidElf);
                    }
                    library.dynsyms = aligned_addr.add(sh.sh_offset as usize).cast();
                    library.dynsym_size = (sh.sh_size / sh.sh_entsize) as usize;
                } else if sh.sh_type as usize == SHT_STRTAB {
                    let s = b".dynstr";
                    if &shrtab[sh.sh_name as usize..sh.sh_name as usize + s.len()] == s {
                        library.dynstr = aligned_addr.add(sh.sh_offset as usize);
                    }
                }
            }

            if library.dynsyms.is_null() || library.dynstr.is_null() {
                return Err(Error::InvalidElf);
            }
            // Reserve one extra page beyond the highest loaded address.
            let consumed_size = max_consumed_size + RISCV_PGSIZE;
            library.consumed_size = consumed_size;
            return Ok(library);
        }
    }
}