//! Virtual machine for eBPF programs.

#![allow(clippy::integer_arithmetic)]
use crate::{
    aligned_memory::AlignedMemory,
    ebpf,
    elf::Executable,
    error::EbpfError,
    interpreter::Interpreter,
    memory_region::{MemoryMapping, MemoryRegion},
    static_analysis::{Analysis, TraceLogEntry},
    verifier::Verifier,
};
use rand::Rng;
use std::{
    collections::{BTreeMap, HashMap},
    fmt::Debug,
    marker::PhantomData,
    mem,
    sync::Arc,
};

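/// A `Result`-like type with a stable `#[repr(C, u64)]` layout, so that host code and
/// generated (JIT) code agree on the position of the discriminant and the payload.
///
/// A minimal conversion sketch (marked `ignore`, so it is not compiled as a doctest;
/// it only relies on the `From` impls and accessors defined in this module):
///
/// ```ignore
/// let result: StableResult<u64, EbpfError> = Ok(42u64).into();
/// assert!(result.is_ok());
/// let back: Result<u64, EbpfError> = result.into();
/// assert_eq!(back.unwrap(), 42);
/// ```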
#[derive(Debug)]
#[repr(C, u64)]
pub enum StableResult<T, E> {
    Ok(T),
    Err(E),
}

impl<T: Debug, E: Debug> StableResult<T, E> {
    pub fn is_ok(&self) -> bool {
        match self {
            Self::Ok(_) => true,
            Self::Err(_) => false,
        }
    }

    pub fn is_err(&self) -> bool {
        match self {
            Self::Ok(_) => false,
            Self::Err(_) => true,
        }
    }

    pub fn unwrap(self) -> T {
        match self {
            Self::Ok(value) => value,
            Self::Err(error) => panic!("unwrap {:?}", error),
        }
    }

    pub fn unwrap_err(self) -> E {
        match self {
            Self::Ok(value) => panic!("unwrap_err {:?}", value),
            Self::Err(error) => error,
        }
    }
}

impl<T, E> From<StableResult<T, E>> for Result<T, E> {
    fn from(result: StableResult<T, E>) -> Self {
        match result {
            StableResult::Ok(value) => Ok(value),
            StableResult::Err(value) => Err(value),
        }
    }
}

impl<T, E> From<Result<T, E>> for StableResult<T, E> {
    fn from(result: Result<T, E>) -> Self {
        match result {
            Ok(value) => Self::Ok(value),
            Err(value) => Self::Err(value),
        }
    }
}

/// Return value of programs and built-in functions.
pub type ProgramResult = StableResult<u64, EbpfError>;

/// Registry of the functions of a program: symbol name hash => (target instruction offset, name).
pub type FunctionRegistry = BTreeMap<u32, (usize, String)>;

/// Signature of a built-in function: the context object, five u64 arguments,
/// the memory mapping, and an out parameter for the result.
pub type BuiltInFunction<C> =
    fn(&mut C, u64, u64, u64, u64, u64, &mut MemoryMapping, &mut ProgramResult);

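/// Collection of built-in functions (syscalls) and, for loaders, the VM `Config`.
///
/// A registration sketch (marked `ignore`; the `log` name and the body of
/// `syscall_log` are illustrative only, not part of this crate):
///
/// ```ignore
/// fn syscall_log(
///     _context: &mut TestContextObject,
///     message_ptr: u64,
///     message_len: u64,
///     _arg3: u64,
///     _arg4: u64,
///     _arg5: u64,
///     _memory_mapping: &mut MemoryMapping,
///     result: &mut ProgramResult,
/// ) {
///     // A real syscall would translate `message_ptr` through the memory mapping here.
///     *result = ProgramResult::Ok(0);
/// }
///
/// let mut loader = BuiltInProgram::<TestContextObject>::new_loader(Config::default());
/// loader.register_function_by_name("log", syscall_log).unwrap();
/// ```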
pub struct BuiltInProgram<C: ContextObject> {
    config: Option<Box<Config>>,
    functions: HashMap<u32, (&'static str, BuiltInFunction<C>)>,
}

impl<C: ContextObject> BuiltInProgram<C> {
    /// Constructs a loader, i.e. a built-in program which owns the VM `Config`.
    pub fn new_loader(config: Config) -> Self {
        Self {
            config: Some(Box::new(config)),
            functions: HashMap::new(),
        }
    }

    /// Returns the VM configuration.
    ///
    /// Panics if this program was not constructed via `new_loader`.
    pub fn get_config(&self) -> &Config {
        self.config.as_ref().unwrap()
    }

    /// Registers a built-in function under the hash of its name.
    ///
    /// Returns an error if a function with the same name hash was already registered.
    pub fn register_function_by_name(
        &mut self,
        name: &'static str,
        function: BuiltInFunction<C>,
    ) -> Result<(), EbpfError> {
        let key = ebpf::hash_symbol_name(name.as_bytes());
        if self.functions.insert(key, (name, function)).is_some() {
            Err(EbpfError::FunctionAlreadyRegistered(key as usize))
        } else {
            Ok(())
        }
    }

    /// Looks up a built-in function by the hash of its name.
    pub fn lookup_function(&self, key: u32) -> Option<(&'static str, BuiltInFunction<C>)> {
        self.functions.get(&key).cloned()
    }

    /// Reports the memory usage of this object, including its heap allocations.
    pub fn mem_size(&self) -> usize {
        mem::size_of::<Self>()
            + if self.config.is_some() {
                mem::size_of::<Config>()
            } else {
                0
            }
            + self.functions.capacity()
                * mem::size_of::<(u32, (&'static str, BuiltInFunction<C>))>()
    }
}

impl<C: ContextObject> Default for BuiltInProgram<C> {
    fn default() -> Self {
        Self {
            config: None,
            functions: HashMap::new(),
        }
    }
}

impl<C: ContextObject> Debug for BuiltInProgram<C> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        writeln!(f, "{:?}", unsafe {
            std::mem::transmute::<_, &HashMap<u32, *const u8>>(&self.functions)
        })?;
        Ok(())
    }
}

impl<C: ContextObject> PartialEq for BuiltInProgram<C> {
    fn eq(&self, other: &Self) -> bool {
        for ((a_key, a_function), (b_key, b_function)) in
            self.functions.iter().zip(other.functions.iter())
        {
            if a_key != b_key || a_function as *const _ as usize != b_function as *const _ as usize
            {
                return false;
            }
        }
        true
    }
}

/// Number of bits by which the randomly generated `Config::runtime_environment_key` is shifted to the right.
pub const PROGRAM_ENVIRONMENT_KEY_SHIFT: u32 = 4;

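/// VM configuration and feature flags.
///
/// A customization sketch (marked `ignore`; it only uses fields declared below and
/// relies on `Config: Default`):
///
/// ```ignore
/// let config = Config {
///     enable_instruction_tracing: true,
///     max_call_depth: 10,
///     ..Config::default()
/// };
/// assert_eq!(config.stack_size(), 10 * 4_096);
/// ```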
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Config {
    pub max_call_depth: usize,
    pub stack_frame_size: usize,
    pub enable_stack_frame_gaps: bool,
    pub instruction_meter_checkpoint_distance: usize,
    pub enable_instruction_meter: bool,
    pub enable_instruction_tracing: bool,
    pub enable_symbol_and_section_labels: bool,
    pub reject_broken_elfs: bool,
    pub noop_instruction_rate: u32,
    pub sanitize_user_provided_values: bool,
    pub runtime_environment_key: i32,
    pub external_internal_function_hash_collision: bool,
    pub reject_callx_r10: bool,
    pub dynamic_stack_frames: bool,
    pub enable_sdiv: bool,
    pub optimize_rodata: bool,
    pub static_syscalls: bool,
    pub enable_elf_vaddr: bool,
    pub new_elf_parser: bool,
    pub reject_rodata_stack_overlap: bool,
    pub aligned_memory_mapping: bool,
}

impl Config {
    /// Returns the size of the stack region in bytes (frame size times maximum call depth).
    pub fn stack_size(&self) -> usize {
        self.stack_frame_size * self.max_call_depth
    }
}

impl Default for Config {
    fn default() -> Self {
        Self {
            max_call_depth: 20,
            stack_frame_size: 4_096,
            enable_stack_frame_gaps: true,
            instruction_meter_checkpoint_distance: 10000,
            enable_instruction_meter: true,
            enable_instruction_tracing: false,
            enable_symbol_and_section_labels: false,
            reject_broken_elfs: false,
            noop_instruction_rate: 256,
            sanitize_user_provided_values: true,
            runtime_environment_key: rand::thread_rng().gen::<i32>()
                >> PROGRAM_ENVIRONMENT_KEY_SHIFT,
            external_internal_function_hash_collision: true,
            reject_callx_r10: true,
            dynamic_stack_frames: true,
            enable_sdiv: true,
            optimize_rodata: true,
            static_syscalls: true,
            enable_elf_vaddr: true,
            new_elf_parser: true,
            reject_rodata_stack_overlap: true,
            aligned_memory_mapping: true,
        }
    }
}

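/// Convenience constructors for `Executable`.
///
/// A loading sketch (marked `ignore`; the ELF path is hypothetical and error
/// handling is elided):
///
/// ```ignore
/// let loader = Arc::new(BuiltInProgram::<TestContextObject>::new_loader(Config::default()));
/// let elf_bytes = std::fs::read("program.so").unwrap();
/// let executable = Executable::from_elf(&elf_bytes, loader).unwrap();
/// ```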
impl<C: ContextObject> Executable<C> {
    /// Creates an executable from an ELF file.
    pub fn from_elf(elf_bytes: &[u8], loader: Arc<BuiltInProgram<C>>) -> Result<Self, EbpfError> {
        let executable = Executable::load(elf_bytes, loader)?;
        Ok(executable)
    }

    /// Creates an executable from raw text section bytes (a list of instructions).
    pub fn from_text_bytes(
        text_bytes: &[u8],
        loader: Arc<BuiltInProgram<C>>,
        function_registry: FunctionRegistry,
    ) -> Result<Self, EbpfError> {
        Executable::new_from_text_bytes(text_bytes, loader, function_registry)
            .map_err(EbpfError::ElfError)
    }
}

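/// An `Executable` that has passed a `Verifier` check.
///
/// A verification sketch (marked `ignore`; it assumes `RequisiteVerifier` from the
/// `verifier` module as the verifier implementation and an `executable` built as shown above):
///
/// ```ignore
/// let mut verified_executable =
///     VerifiedExecutable::<RequisiteVerifier, TestContextObject>::from_executable(executable)
///         .unwrap();
/// // On x86_64 (non-Windows) targets with the "jit" feature, machine code can be generated:
/// #[cfg(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64"))]
/// verified_executable.jit_compile().unwrap();
/// ```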
#[derive(Debug, PartialEq)]
#[repr(transparent)]
pub struct VerifiedExecutable<V: Verifier, C: ContextObject> {
    executable: Executable<C>,
    _verifier: PhantomData<V>,
}

impl<V: Verifier, C: ContextObject> VerifiedExecutable<V, C> {
    /// Verifies the text section of the given executable with `V`.
    pub fn from_executable(executable: Executable<C>) -> Result<Self, EbpfError> {
        <V as Verifier>::verify(
            executable.get_text_bytes().1,
            executable.get_config(),
            executable.get_function_registry(),
        )?;
        Ok(VerifiedExecutable {
            executable,
            _verifier: PhantomData,
        })
    }

    /// JIT compiles the executable to native machine code.
    #[cfg(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64"))]
    pub fn jit_compile(&mut self) -> Result<(), EbpfError> {
        Executable::<C>::jit_compile(&mut self.executable)
    }

    /// Returns the underlying executable.
    pub fn get_executable(&self) -> &Executable<C> {
        &self.executable
    }
}

/// Runtime context of a VM, implemented by the embedder (e.g. for instruction
/// metering and tracing).
pub trait ContextObject {
    /// Records one trace entry: the eleven registers followed by the program counter.
    fn trace(&mut self, state: [u64; 12]);
    /// Consumes the given amount from the instruction meter.
    fn consume(&mut self, amount: u64);
    /// Returns the number of instructions the program may still execute.
    fn get_remaining(&self) -> u64;
}

/// Simple `ContextObject` implementation for tests: records a trace log and meters instructions.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct TestContextObject {
    pub trace_log: Vec<TraceLogEntry>,
    pub remaining: u64,
}

impl ContextObject for TestContextObject {
    fn trace(&mut self, state: [u64; 12]) {
        self.trace_log.push(state);
    }

    fn consume(&mut self, amount: u64) {
        debug_assert!(amount <= self.remaining, "Execution count exceeded");
        self.remaining = self.remaining.saturating_sub(amount);
    }

    fn get_remaining(&self) -> u64 {
        self.remaining
    }
}

impl TestContextObject {
    /// Creates a context object with the given instruction budget.
    pub fn new(remaining: u64) -> Self {
        Self {
            trace_log: Vec::new(),
            remaining,
        }
    }

    /// Compares an interpreter trace against a JIT trace, ignoring any extra entries
    /// the JIT may have recorded at the end.
    pub fn compare_trace_log(interpreter: &Self, jit: &Self) -> bool {
        let interpreter = interpreter.trace_log.as_slice();
        let mut jit = jit.trace_log.as_slice();
        if jit.len() > interpreter.len() {
            jit = &jit[0..interpreter.len()];
        }
        interpreter == jit
    }
}

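/// Accumulated edge counters from a dynamic analysis: which control-flow-graph edges
/// were taken, and how often.
///
/// A usage sketch (marked `ignore`; `analysis` and `context_object` are assumed to
/// come from the `static_analysis` module and from a traced interpreted run, respectively):
///
/// ```ignore
/// let dynamic_analysis = DynamicAnalysis::new(&context_object.trace_log, &analysis);
/// for (source, destinations) in &dynamic_analysis.edges {
///     for (destination, count) in destinations {
///         println!("{:#x} -> {:#x}: taken {} times", source, destination, count);
///     }
/// }
/// ```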
pub struct DynamicAnalysis {
    /// Maximal edge counter value observed.
    pub edge_counter_max: usize,
    /// Edge counters: source basic block => destination basic block => number of transitions.
    pub edges: BTreeMap<usize, BTreeMap<usize, usize>>,
}

impl DynamicAnalysis {
    /// Accumulates a trace log into control-flow-graph edge counters.
    pub fn new(trace_log: &[[u64; 12]], analysis: &Analysis) -> Self {
        let mut result = Self {
            edge_counter_max: 0,
            edges: BTreeMap::new(),
        };
        let mut last_basic_block = usize::MAX;
        for traced_instruction in trace_log.iter() {
            let pc = traced_instruction[11] as usize;
            if analysis.cfg_nodes.contains_key(&pc) {
                let counter = result
                    .edges
                    .entry(last_basic_block)
                    .or_insert_with(BTreeMap::new)
                    .entry(pc)
                    .or_insert(0);
                *counter += 1;
                result.edge_counter_max = result.edge_counter_max.max(*counter);
                last_basic_block = pc;
            }
        }
        result
    }
}

/// One frame of the call stack, saved when a function is called and restored on exit.
#[derive(Clone, Default)]
pub struct CallFrame {
    pub caller_saved_registers: [u64; ebpf::SCRATCH_REGS],
    pub frame_pointer: u64,
    pub target_pc: usize,
}

/// The mutable state of a VM during execution. `#[repr(C)]` so that JIT compiled
/// code can address its fields at fixed offsets.
#[repr(C)]
pub struct RuntimeEnvironment<'a, C: ContextObject> {
    /// Pointer into the host stack.
    pub host_stack_pointer: *mut u64,
    /// Current call depth of the guest program.
    pub call_depth: u64,
    /// Stack pointer of the guest program.
    pub stack_pointer: u64,
    /// The context object the built-in functions and the instruction meter operate on.
    pub context_object_pointer: &'a mut C,
    /// Last value the instruction meter was synchronized to.
    pub previous_instruction_meter: u64,
    /// Numerator of the profiling stopwatch.
    pub stopwatch_numerator: u64,
    /// Denominator of the profiling stopwatch.
    pub stopwatch_denominator: u64,
    /// Result of the current execution, also the out parameter of built-in functions.
    pub program_result: ProgramResult,
    /// Memory mapping of the guest address space.
    pub memory_mapping: MemoryMapping<'a>,
    /// Saved call frames of the guest program.
    pub call_frames: Vec<CallFrame>,
}

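/// A virtual machine to run eBPF programs.
///
/// An end-to-end sketch (marked `ignore`; it assumes a `verified_executable` built as
/// shown in the `VerifiedExecutable` docs above, with an instruction budget of 1_000):
///
/// ```ignore
/// let mut context_object = TestContextObject::new(1_000);
/// let mut heap = vec![0u8; 64 * 1024];
/// let mut input = vec![0u8; 1024];
/// let input_region = MemoryRegion::new_writable(&mut input, ebpf::MM_INPUT_START);
/// let mut vm = EbpfVm::new(
///     &verified_executable,
///     &mut context_object,
///     &mut heap,
///     vec![input_region],
/// )
/// .unwrap();
/// let (instruction_count, result) = vm.execute_program(true /* interpreted */);
/// assert!(instruction_count <= 1_000);
/// ```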
pub struct EbpfVm<'a, V: Verifier, C: ContextObject> {
    pub(crate) verified_executable: &'a VerifiedExecutable<V, C>,
    _stack: AlignedMemory<{ ebpf::HOST_ALIGN }>,
    #[cfg(feature = "debugger")]
    pub debug_port: Option<u16>,
    pub env: RuntimeEnvironment<'a, C>,
}

impl<'a, V: Verifier, C: ContextObject> EbpfVm<'a, V, C> {
    /// Creates a new virtual machine instance from a verified executable, a context
    /// object, a heap region, and any additional memory regions (e.g. input data).
    pub fn new(
        verified_executable: &'a VerifiedExecutable<V, C>,
        context_object: &'a mut C,
        heap_region: &mut [u8],
        additional_regions: Vec<MemoryRegion>,
    ) -> Result<EbpfVm<'a, V, C>, EbpfError> {
        let executable = verified_executable.get_executable();
        let config = executable.get_config();
        let mut stack = AlignedMemory::zero_filled(config.stack_size());
        // With dynamic stack frames the stack pointer starts at the top of the stack
        // region, otherwise one fixed frame above its bottom.
        let stack_pointer = ebpf::MM_STACK_START.saturating_add(if config.dynamic_stack_frames {
            stack.len()
        } else {
            config.stack_frame_size
        } as u64);
        let regions: Vec<MemoryRegion> = vec![
            verified_executable.get_executable().get_ro_region(),
            MemoryRegion::new_writable_gapped(
                stack.as_slice_mut(),
                ebpf::MM_STACK_START,
                if !config.dynamic_stack_frames && config.enable_stack_frame_gaps {
                    config.stack_frame_size as u64
                } else {
                    0
                },
            ),
            MemoryRegion::new_writable(heap_region, ebpf::MM_HEAP_START),
        ]
        .into_iter()
        .chain(additional_regions.into_iter())
        .collect();
        let vm = EbpfVm {
            verified_executable,
            _stack: stack,
            #[cfg(feature = "debugger")]
            debug_port: None,
            env: RuntimeEnvironment {
                host_stack_pointer: std::ptr::null_mut(),
                call_depth: 0,
                stack_pointer,
                context_object_pointer: context_object,
                previous_instruction_meter: 0,
                stopwatch_numerator: 0,
                stopwatch_denominator: 0,
                program_result: ProgramResult::Ok(0),
                memory_mapping: MemoryMapping::new(regions, config)?,
                call_frames: vec![CallFrame::default(); config.max_call_depth],
            },
        };
        Ok(vm)
    }

    /// Executes the program, either in the interpreter (`interpreted == true`) or as
    /// JIT compiled machine code, and returns the number of executed instructions
    /// together with the program result.
    pub fn execute_program(&mut self, interpreted: bool) -> (u64, ProgramResult) {
        let mut registers = [0u64; 11];
        registers[1] = ebpf::MM_INPUT_START;
        registers[ebpf::FRAME_PTR_REG] = self.env.stack_pointer;
        let executable = self.verified_executable.get_executable();
        let target_pc = executable.get_entrypoint_instruction_offset();
        let config = executable.get_config();
        let initial_insn_count = if config.enable_instruction_meter {
            self.env.context_object_pointer.get_remaining()
        } else {
            0
        };
        self.env.previous_instruction_meter = initial_insn_count;
        self.env.program_result = ProgramResult::Ok(0);
        let due_insn_count = if interpreted {
            #[cfg(feature = "debugger")]
            let debug_port = self.debug_port.clone();
            let mut interpreter = match Interpreter::new(self, registers, target_pc) {
                Ok(interpreter) => interpreter,
                Err(error) => return (0, ProgramResult::Err(error)),
            };
            #[cfg(feature = "debugger")]
            if let Some(debug_port) = debug_port {
                crate::debugger::execute(&mut interpreter, debug_port);
            } else {
                while interpreter.step() {}
            }
            #[cfg(not(feature = "debugger"))]
            while interpreter.step() {}
            interpreter.due_insn_count
        } else {
            #[cfg(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64"))]
            {
                let compiled_program = match executable
                    .get_compiled_program()
                    .ok_or(EbpfError::JitNotCompiled)
                {
                    Ok(compiled_program) => compiled_program,
                    Err(error) => return (0, ProgramResult::Err(error)),
                };
                let instruction_meter_final = compiled_program
                    .invoke(config, &mut self.env, registers, target_pc)
                    .max(0) as u64;
                self.env
                    .context_object_pointer
                    .get_remaining()
                    .saturating_sub(instruction_meter_final)
            }
            #[cfg(not(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64")))]
            {
                return (0, ProgramResult::Err(EbpfError::JitNotCompiled));
            }
        };
        let instruction_count = if config.enable_instruction_meter {
            self.env.context_object_pointer.consume(due_insn_count);
            initial_insn_count.saturating_sub(self.env.context_object_pointer.get_remaining())
        } else {
            0
        };
        if let ProgramResult::Err(EbpfError::ExceededMaxInstructions(pc, _)) =
            self.env.program_result
        {
            // Replace the meter value in the error with the initial instruction count.
            self.env.program_result =
                ProgramResult::Err(EbpfError::ExceededMaxInstructions(pc, initial_insn_count));
        }
        let mut result = ProgramResult::Ok(0);
        std::mem::swap(&mut result, &mut self.env.program_result);
        (instruction_count, result)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_program_result_is_stable() {
        let ok = ProgramResult::Ok(42);
        assert_eq!(unsafe { *(&ok as *const _ as *const u64) }, 0);
        let err = ProgramResult::Err(EbpfError::JitNotCompiled);
        assert_eq!(unsafe { *(&err as *const _ as *const u64) }, 1);
    }
}
648}