1use crate::ckb_constants::Source;
71use crate::error::SysError;
72use crate::high_level::find_cell_by_data_hash;
73use crate::syscalls::{load_cell_code, load_cell_data_raw};
74use core::cmp::{max, min};
75use core::marker::PhantomData;
76use core::mem::{size_of, zeroed};
77
/// ELF64 file header, laid out to match the on-disk `Elf64_Ehdr` structure
/// so it can be filled by a raw cell-data read (see `load_with_offset`).
#[repr(C)]
#[derive(Default)]
struct Elf64Ehdr {
    e_ident: [u8; 16], // magic number and identification bytes
    e_type: u16,       // object file type
    e_machine: u16,    // target architecture
    e_version: u32,    // object file version
    e_entry: u64,      // entry point virtual address
    e_phoff: u64,      // program header table file offset
    e_shoff: u64,      // section header table file offset
    e_flags: u32,      // processor-specific flags
    e_ehsize: u16,     // ELF header size in bytes
    e_phentsize: u16,  // size of one program header entry
    e_phnum: u16,      // number of program header entries
    e_shentsize: u16,  // size of one section header entry
    e_shnum: u16,      // number of section header entries
    e_shstrndx: u16,   // index of the section-name string table section
}
96
// Section header `sh_type` values this loader recognizes (ELF-64 spec).
const SHT_STRTAB: usize = 3; // string table
const SHT_RELA: usize = 4; // relocation entries with explicit addends
const SHT_DYNSYM: usize = 11; // dynamic linking symbol table

/// ELF64 section header, laid out to match the on-disk `Elf64_Shdr`
/// structure so it can be filled by a raw cell-data read.
#[repr(C)]
#[derive(Default)]
struct Elf64Shdr {
    sh_name: u32,      // offset of the section name in the string table
    sh_type: u32,      // section type (see SHT_* above)
    sh_flags: u64,     // section attribute flags
    sh_addr: u64,      // virtual address in the loaded image
    sh_offset: u64,    // file offset of the section contents
    sh_size: u64,      // section size in bytes
    sh_link: u32,      // section-type-dependent link to another section
    sh_info: u32,      // section-type-dependent extra information
    sh_addralign: u64, // required alignment
    sh_entsize: u64,   // entry size for table-like sections
}
115
// Program header constants (ELF-64 spec).
const PT_LOAD: usize = 1; // loadable segment
const PF_X: usize = 1; // segment flag: executable

/// ELF64 program header, laid out to match the on-disk `Elf64_Phdr`
/// structure so it can be filled by a raw cell-data read.
#[repr(C)]
#[derive(Default)]
struct Elf64Phdr {
    p_type: u32,   // segment type (see PT_LOAD)
    p_flags: u32,  // segment flags (see PF_X)
    p_offset: u64, // file offset of the segment contents
    p_vaddr: u64,  // virtual address of the segment in memory
    p_paddr: u64,  // physical address (unused here)
    p_filesz: u64, // number of bytes stored in the file
    p_memsz: u64,  // number of bytes occupied in memory (>= p_filesz)
    p_align: u64,  // segment alignment
}
131
/// ELF64 symbol table entry (`Elf64_Sym`), read in place from the loaded
/// image's `.dynsym` section during symbol lookup.
#[repr(C)]
struct Elf64Sym {
    st_name: u32,  // offset of the symbol name in the dynamic string table
    st_info: u8,   // symbol binding and type
    st_other: u8,  // symbol visibility
    st_shndx: u16, // index of the section this symbol is defined in
    st_value: u64, // symbol value (address relative to the image base)
    st_size: u64,  // symbol size in bytes
}
141
// The only relocation type this loader supports: base-relative
// (*(base + r_offset) = base + r_addend), per the RISC-V psABI.
const R_RISCV_RELATIVE: usize = 3;

/// ELF64 relocation entry with addend (`Elf64_Rela`), laid out to match the
/// on-disk structure so batches can be filled by raw cell-data reads.
#[repr(C)]
#[derive(Default)]
struct Elf64Rela {
    r_offset: u64, // location to patch, relative to the image base
    r_info: u64,   // relocation type (low bits) and symbol index (high bits)
    r_addend: i64, // constant addend used to compute the stored value
}
151
/// log2 of the RISC-V page size.
const RISCV_PGSIZE_SHIFT: usize = 12;
/// RISC-V page size (4 KiB).
const RISCV_PGSIZE: usize = 1 << RISCV_PGSIZE_SHIFT;

/// Rounds `a` up to the nearest multiple of `1 << shift_n`.
///
/// Returns 0 for `a == 0`; the previous implementation computed `a - 1`
/// unconditionally, which underflows `usize` (panicking in debug builds) for
/// a zero-length segment end.
fn roundup_shift(a: usize, shift_n: usize) -> usize {
    if a == 0 {
        return 0;
    }
    (((a - 1) >> shift_n) + 1) << shift_n
}
159
/// Errors that can occur while dynamically loading a library from a cell dep.
#[derive(Debug, Eq, PartialEq)]
pub enum Error {
    /// The loading context is too small or otherwise unusable.
    ContextFailure,
    /// The cell dep's data is not a loadable ELF image.
    InvalidElf,
    /// A segment does not fit into the context buffer.
    MemoryNotEnough,
    /// No cell dep with the requested data hash was found.
    CellNotFound,
    /// The requested load size is not page aligned.
    InvalidAlign,
    /// An underlying syscall failed.
    Sys(SysError),
}
176
177impl From<SysError> for Error {
178 fn from(error: SysError) -> Error {
179 Error::Sys(error)
180 }
181}
182
/// A typed handle to a symbol resolved from a loaded [`Library`].
pub struct Symbol<T> {
    ptr: *const u8,        // address of the symbol inside the loaded image
    phantom: PhantomData<T>, // carries the caller-chosen symbol type
}
188
189impl<T> Symbol<T> {
190 fn new(ptr: *const u8) -> Self {
191 Symbol {
192 ptr,
193 phantom: PhantomData,
194 }
195 }
196}
197
impl<T> core::ops::Deref for Symbol<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // Reinterprets a reference to the stored address (`&self.ptr`) as
        // `&T`, i.e. the returned reference points at the `ptr` field itself.
        // NOTE(review): this only makes sense when `T` is a pointer-sized
        // function-pointer type, so that `*symbol` reads the raw address as a
        // callable — confirm callers only instantiate `Symbol<T>` with fn
        // pointer types.
        unsafe { core::mem::transmute(&self.ptr) }
    }
}
205
/// A shared library loaded into a [`CKBDLContext`] buffer.
pub struct Library {
    dynsyms: *const Elf64Sym, // dynamic symbol table inside the loaded image
    dynstr: *const u8,        // dynamic string table inside the loaded image
    dynsym_size: usize,       // number of entries in `dynsyms`
    base_addr: *const u8,     // base address the image was loaded at
    consumed_size: usize,     // bytes of the context consumed by this library
}
214
215impl Library {
216 fn new() -> Self {
217 Library {
218 dynsyms: core::ptr::null(),
219 dynstr: core::ptr::null(),
220 dynsym_size: 0,
221 base_addr: core::ptr::null(),
222 consumed_size: 0,
223 }
224 }
225
226 pub fn consumed_size(&self) -> usize {
228 self.consumed_size
229 }
230
231 pub unsafe fn get<S>(&self, symbol: &[u8]) -> Option<Symbol<S>> {
238 unsafe fn cmp_raw_str(ptr: *const u8, s: &[u8]) -> bool {
239 let mut i = 0;
240 for c in s {
241 let sym_c = *ptr.add(i);
242 if sym_c == 0 {
244 return false;
245 }
246 if &sym_c != c {
247 return false;
248 }
249 i += 1;
250 }
251 let term_c = *ptr.add(i);
252 term_c == 0
254 }
255
256 for i in 0..self.dynsym_size {
257 let sym = self.dynsyms.add(i);
258 let str_ptr = self.dynstr.add((*sym).st_name as usize);
259 if cmp_raw_str(str_ptr, symbol) {
260 let sym_ptr = self.base_addr.add((*sym).st_value as usize);
261 return Some(Symbol::new(sym_ptr));
262 }
263 }
264
265 return None;
266 }
267}
268
/// A dynamic-loading context: a 4096-byte-aligned buffer of type `T` that
/// receives the code and data of libraries loaded from cell deps.
#[repr(C)]
#[repr(align(4096))]
pub struct CKBDLContext<T>(T);
275
276#[deprecated(
277 since = "0.7.3",
278 note = "Please use the dynamic_loading_c_impl instead"
279)]
280impl<T> CKBDLContext<T> {
281 pub unsafe fn new() -> Self {
287 zeroed()
288 }
289
290 pub fn load<'a>(&'a mut self, dep_cell_data_hash: &[u8]) -> Result<Library, Error> {
293 self.load_with_offset(dep_cell_data_hash, 0, size_of::<CKBDLContext<T>>())
294 }
295
296 pub fn load_with_offset<'a>(
319 &'a mut self,
320 dep_cell_data_hash: &[u8],
321 offset: usize,
322 size: usize,
323 ) -> Result<Library, Error> {
324 if size_of::<Library>() > RISCV_PGSIZE || size < RISCV_PGSIZE {
325 return Err(Error::ContextFailure);
326 }
327
328 if ((size >> RISCV_PGSIZE_SHIFT) << RISCV_PGSIZE_SHIFT) != size {
330 return Err(Error::InvalidAlign);
331 }
332
333 unsafe {
334 let aligned_size = size;
336 let aligned_addr = (&mut self.0 as *mut T).cast::<u8>().add(offset);
337 let mut library = Library::new();
338 library.base_addr = aligned_addr;
339
340 let index = find_cell_by_data_hash(dep_cell_data_hash, Source::CellDep)?
341 .ok_or(Error::CellNotFound)?;
342
343 let mut hdr = Elf64Ehdr::default();
345 let len = size_of::<Elf64Ehdr>();
346 let loaded_len = {
347 let elf_hdr_ptr = &mut hdr as *mut Elf64Ehdr;
348 match load_cell_data_raw(elf_hdr_ptr.cast(), len, 0, index, Source::CellDep) {
349 Ok(len) => len,
350 Err(SysError::LengthNotEnough(_)) => len,
351 Err(err) => return Err(err.into()),
352 }
353 };
354 if loaded_len < len {
355 return Err(Error::InvalidElf);
356 }
357 if (hdr.e_phentsize as usize != size_of::<Elf64Phdr>())
358 || (hdr.e_shentsize as usize != size_of::<Elf64Shdr>())
359 || (hdr.e_phnum > 16)
360 || (hdr.e_shnum > 32)
361 {
362 return Err(Error::InvalidElf);
363 }
364
365 let mut program_hdrs: [Elf64Phdr; 16] = Default::default();
367 let len = size_of::<Elf64Phdr>() * hdr.e_phnum as usize;
368 let loaded_len = {
369 let ptr = program_hdrs.as_mut_ptr();
370 match load_cell_data_raw(
371 ptr.cast(),
372 len,
373 hdr.e_phoff as usize,
374 index,
375 Source::CellDep,
376 ) {
377 Ok(len) => len,
378 Err(SysError::LengthNotEnough(_)) => len,
379 Err(err) => return Err(err.into()),
380 }
381 };
382 if loaded_len < len {
383 return Err(Error::InvalidElf);
384 }
385 let mut max_consumed_size = 0;
386 for ph in &program_hdrs[0..hdr.e_phnum as usize] {
387 if ph.p_type as usize == PT_LOAD && ph.p_memsz > 0 {
388 if (ph.p_flags as usize & PF_X) != 0 {
389 let prepad = ph.p_vaddr as usize % RISCV_PGSIZE;
390 let vaddr = ph.p_vaddr as usize - prepad;
391 let memsz = roundup_shift(prepad + ph.p_memsz as usize, RISCV_PGSIZE_SHIFT);
392 let size = vaddr + memsz;
393 if size > aligned_size {
394 return Err(Error::MemoryNotEnough);
395 }
396 load_cell_code(
397 aligned_addr.add(vaddr),
398 memsz,
399 ph.p_offset as usize,
400 ph.p_filesz as usize,
401 index,
402 Source::CellDep,
403 )?;
404 max_consumed_size = max(max_consumed_size, vaddr + memsz);
405 } else {
406 let filesz = ph.p_filesz as usize;
407 let size = ph.p_vaddr as usize + filesz;
408 let consumed_end: usize = roundup_shift(size, RISCV_PGSIZE_SHIFT);
409 if consumed_end > aligned_size {
410 return Err(Error::MemoryNotEnough);
411 }
412 let loaded_len = match load_cell_data_raw(
413 aligned_addr.add(ph.p_vaddr as usize),
414 filesz,
415 ph.p_offset as usize,
416 index,
417 Source::CellDep,
418 ) {
419 Ok(len) => len,
420 Err(SysError::LengthNotEnough(_)) => filesz,
421 Err(err) => return Err(err.into()),
422 };
423 if loaded_len < filesz {
424 return Err(Error::InvalidElf);
425 }
426 max_consumed_size = max(max_consumed_size, consumed_end);
427 }
428 }
429 }
430
431 let mut section_hdrs: [Elf64Shdr; 32] = Default::default();
435 let len = size_of::<Elf64Shdr>() * hdr.e_shnum as usize;
436 let loaded_len = {
437 let ptr = section_hdrs.as_mut_ptr();
438 match load_cell_data_raw(
439 ptr.cast(),
440 len,
441 hdr.e_shoff as usize,
442 index,
443 Source::CellDep,
444 ) {
445 Ok(len) => len,
446 Err(SysError::LengthNotEnough(_)) => len,
447 Err(err) => return Err(err.into()),
448 }
449 };
450 if loaded_len < len {
451 return Err(Error::InvalidElf);
452 }
453
454 let shshrtab = §ion_hdrs[hdr.e_shstrndx as usize];
457 let mut shrtab = [0u8; 4096];
458 if shshrtab.sh_size > 4096 {
459 return Err(Error::InvalidElf);
460 }
461 let shrtab_len = shshrtab.sh_size as usize;
462 let _loaded_len = {
463 let ptr = shrtab.as_mut_ptr();
464 match load_cell_data_raw(
465 ptr.cast(),
466 shrtab_len,
467 shshrtab.sh_offset as usize,
468 index,
469 Source::CellDep,
470 ) {
471 Ok(len) => len,
472 Err(SysError::LengthNotEnough(_)) => len,
473 Err(err) => return Err(err.into()),
474 }
475 };
476 if shrtab_len < shshrtab.sh_size as usize {
477 return Err(Error::InvalidElf);
478 }
479 for sh in §ion_hdrs[0..hdr.e_shnum as usize] {
480 if sh.sh_type as usize == SHT_RELA {
481 if sh.sh_entsize as usize != size_of::<Elf64Rela>() {
482 return Err(Error::InvalidElf);
483 }
484 let mut relocation_size = (sh.sh_size / sh.sh_entsize) as usize;
485 let mut current_offset = sh.sh_offset as usize;
486 while relocation_size > 0 {
487 let mut relocations: [Elf64Rela; 64] = zeroed();
488 let load_size = min(relocation_size, 64) as usize;
489 let load_length = load_size * size_of::<Elf64Rela>();
490 let loaded_len = {
491 let ptr = relocations.as_mut_ptr();
492 match load_cell_data_raw(
493 ptr.cast(),
494 load_length,
495 current_offset,
496 index,
497 Source::CellDep,
498 ) {
499 Ok(len) => len,
500 Err(SysError::LengthNotEnough(_)) => load_length,
501 Err(err) => return Err(err.into()),
502 }
503 };
504 if loaded_len < load_length {
505 return Err(Error::InvalidElf);
506 }
507 relocation_size -= load_size;
508 current_offset += len;
509 for r in &relocations[0..load_size] {
510 if r.r_info as usize != R_RISCV_RELATIVE {
511 return Err(Error::InvalidElf);
514 }
515 aligned_addr
516 .add(r.r_offset as usize)
517 .cast::<u64>()
518 .write_unaligned(
519 aligned_addr.offset(r.r_addend as isize) as usize as u64
520 );
521 }
522 }
523 } else if sh.sh_type as usize == SHT_DYNSYM {
524 if sh.sh_entsize as usize != size_of::<Elf64Sym>() {
526 return Err(Error::InvalidElf);
527 }
528 library.dynsyms = aligned_addr.add(sh.sh_offset as usize).cast();
529 library.dynsym_size = (sh.sh_size / sh.sh_entsize) as usize;
530 } else if sh.sh_type as usize == SHT_STRTAB {
531 let s = b".dynstr";
532 if &shrtab[sh.sh_name as usize..sh.sh_name as usize + s.len()] == s {
533 library.dynstr = aligned_addr.add(sh.sh_offset as usize);
534 }
535 }
536 }
537
538 if library.dynsyms.is_null() || library.dynstr.is_null() {
539 return Err(Error::InvalidElf);
540 }
541 let consumed_size = max_consumed_size + RISCV_PGSIZE;
542 library.consumed_size = consumed_size;
543 return Ok(library);
544 }
545 }
546}