1use {
3 crate::{
4 ebpf,
5 elf::ElfError,
6 memory_region::MemoryMapping,
7 vm::{Config, ContextObject, EbpfVm},
8 },
9 std::collections::{btree_map::Entry, BTreeMap},
10};
11
/// The SBPF bytecode version of an executable.
///
/// Variants are declared in ascending order so that the derived
/// `PartialOrd` supports cumulative feature gating via `>=` comparisons.
#[derive(Debug, PartialEq, PartialOrd, Eq, Clone, Copy)]
pub enum SBPFVersion {
    /// The legacy format
    V0,
    /// First versioned format
    V1,
    /// Second revision (instruction set changes, see the `V2` gates below)
    V2,
    /// Third revision (static syscalls, stricter ELF handling)
    V3,
    /// Fourth revision
    V4,
    /// Marks a version that is not supported
    Reserved,
}

impl SBPFVersion {
    /// The program itself bumps the stack frame pointer around calls.
    ///
    /// Intentionally exact (V1 and V2 only), unlike the cumulative gates
    /// below: V0 uses fixed frames and V3 onward handle the frame pointer
    /// differently.
    pub fn manual_stack_frame_bump(self) -> bool {
        self == SBPFVersion::V1 || self == SBPFVersion::V2
    }
    /// Gaps are inserted between stack frames (legacy V0 behavior only).
    pub fn stack_frame_gaps(self) -> bool {
        self == SBPFVersion::V0
    }

    // The features below were introduced in V2. Like the V3 gates further
    // down they are cumulative: once introduced they remain enabled in
    // every later version, hence `>=` rather than `==` (an exact `==`
    // would wrongly revoke them for V3 / V4 programs).

    /// Enable the product / quotient / remainder instructions (V2 onward).
    pub fn enable_pqr(self) -> bool {
        self >= SBPFVersion::V2
    }
    /// Results are explicitly sign extended (V2 onward).
    pub fn explicit_sign_extension_of_results(self) -> bool {
        self >= SBPFVersion::V2
    }
    /// The operands of `sub reg, imm` are swapped (V2 onward).
    pub fn swap_sub_reg_imm_operands(self) -> bool {
        self >= SBPFVersion::V2
    }
    /// The `neg` instruction is disabled (V2 onward).
    pub fn disable_neg(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// `callx` takes its target from the source register.
    ///
    /// Intentionally exact (V2 only): V3 onward use the destination
    /// register instead, see [`SBPFVersion::callx_uses_dst_reg`].
    pub fn callx_uses_src_reg(self) -> bool {
        self == SBPFVersion::V2
    }
    /// The `lddw` instruction is disabled (V2 onward).
    pub fn disable_lddw(self) -> bool {
        self >= SBPFVersion::V2
    }
    /// The `le` byte-swap instruction is disabled (V2 onward).
    pub fn disable_le(self) -> bool {
        self >= SBPFVersion::V2
    }
    /// Memory instructions are moved to other instruction classes (V2 onward).
    pub fn move_memory_instruction_classes(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Syscalls are resolved statically (V3 onward).
    pub fn static_syscalls(self) -> bool {
        self >= SBPFVersion::V3
    }
    /// ELF headers are validated more strictly (V3 onward).
    pub fn enable_stricter_elf_headers(self) -> bool {
        self >= SBPFVersion::V3
    }
    /// Read-only data may live at a lower virtual address (V3 onward).
    pub fn enable_lower_rodata_vaddr(self) -> bool {
        self >= SBPFVersion::V3
    }
    /// The 32-bit jump instructions are enabled (V3 onward).
    pub fn enable_jmp32(self) -> bool {
        self >= SBPFVersion::V3
    }
    /// `callx` takes its target from the destination register (V3 onward).
    pub fn callx_uses_dst_reg(self) -> bool {
        self >= SBPFVersion::V3
    }

    /// Resolve the target pc of a `call` with an immediate operand.
    ///
    /// With static syscalls (V3 onward) the immediate is pc-relative and
    /// the result is `pc + imm + 1` (saturating); in earlier versions the
    /// immediate is returned as-is, truncated to `u32`.
    pub fn calculate_call_imm_target_pc(self, pc: usize, imm: i64) -> u32 {
        if self.static_syscalls() {
            (pc as i64).saturating_add(imm).saturating_add(1) as u32
        } else {
            imm as u32
        }
    }
}
106
/// Holds the name and payload of registered function symbols, keyed by a
/// `u32` hash / id.
#[derive(Debug, PartialEq, Eq)]
pub struct FunctionRegistry<T> {
    pub(crate) map: BTreeMap<u32, (Vec<u8>, T)>,
}

impl<T> Default for FunctionRegistry<T> {
    /// Returns a registry with no symbols in it.
    fn default() -> Self {
        FunctionRegistry {
            map: BTreeMap::default(),
        }
    }
}
120
impl<T: Copy + PartialEq> FunctionRegistry<T> {
    /// Register a symbol under an explicit `key`.
    ///
    /// Re-registering an occupied key succeeds only when the payload
    /// `value` is identical (the already stored name is kept); a differing
    /// payload yields `ElfError::SymbolHashCollision`.
    pub fn register_function(
        &mut self,
        key: u32,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<(), ElfError> {
        match self.map.entry(key) {
            Entry::Vacant(entry) => {
                entry.insert((name.into(), value));
            }
            Entry::Occupied(entry) => {
                if entry.get().1 != value {
                    return Err(ElfError::SymbolHashCollision(key));
                }
            }
        }
        Ok(())
    }

    /// Register a symbol the way the legacy loader did and return the key.
    ///
    /// With `hash_symbol_name` set, the key is `ebpf::hash_symbol_name` of
    /// either the literal name (only for `"entrypoint"`) or the
    /// little-endian bytes of `value`; a key already present in the
    /// loader's registry is rejected as a collision. Without it, the key
    /// is `value` itself truncated to `u32`.
    pub(crate) fn register_function_hashed_legacy<C: ContextObject>(
        &mut self,
        loader: &BuiltinProgram<C>,
        hash_symbol_name: bool,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<u32, ElfError>
    where
        usize: From<T>,
    {
        let name = name.into();
        let config = loader.get_config();
        let key = if hash_symbol_name {
            // "entrypoint" is hashed by its name; every other symbol is
            // hashed by its target value (legacy relocation scheme).
            let hash = if name == b"entrypoint" {
                ebpf::hash_symbol_name(b"entrypoint")
            } else {
                ebpf::hash_symbol_name(&usize::from(value).to_le_bytes())
            };
            // Reject keys that would shadow a registered loader builtin.
            if loader.get_function_registry().lookup_by_key(hash).is_some() {
                return Err(ElfError::SymbolHashCollision(hash));
            }
            hash
        } else {
            usize::from(value) as u32
        };
        self.register_function(
            key,
            // Store the name only when symbol labels are enabled; the
            // entrypoint's name is always kept.
            if config.enable_symbol_and_section_labels || name == b"entrypoint" {
                name
            } else {
                Vec::default()
            },
            value,
        )?;
        Ok(key)
    }

    /// Unregister a symbol by its key (no-op when the key is absent).
    pub fn unregister_function(&mut self, key: u32) {
        self.map.remove(&key);
    }

    /// Iterate over all keys, in ascending order.
    pub fn keys(&self) -> impl Iterator<Item = u32> + '_ {
        self.map.keys().copied()
    }

    /// Iterate over all entries as `(key, (name, value))`, ordered by key.
    pub fn iter(&self) -> impl Iterator<Item = (u32, (&[u8], T))> + '_ {
        self.map
            .iter()
            .map(|(key, (name, value))| (*key, (name.as_slice(), *value)))
    }

    /// Look up an entry by its key.
    pub fn lookup_by_key(&self, key: u32) -> Option<(&[u8], T)> {
        self.map
            .get(&key)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

    /// Look up an entry by its name (linear scan over all entries).
    pub fn lookup_by_name(&self, name: &[u8]) -> Option<(&[u8], T)> {
        self.map
            .values()
            .find(|(function_name, _value)| function_name == name)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

    /// Approximate memory footprint in bytes, computed with saturating
    /// arithmetic: the registry itself plus, per entry, the payload, the
    /// name's `Vec` header and its heap capacity. BTreeMap node overhead
    /// is not counted.
    pub fn mem_size(&self) -> usize {
        std::mem::size_of::<Self>().saturating_add(self.map.iter().fold(
            0,
            |state: usize, (_, (name, value))| {
                state.saturating_add(
                    std::mem::size_of_val(value).saturating_add(
                        std::mem::size_of_val(name).saturating_add(name.capacity()),
                    ),
                )
            },
        ))
    }
}
227
/// A builtin function as seen by the VM: receives the (obfuscated) VM
/// pointer and the five u64 argument registers.
pub type BuiltinFunction<C> = fn(*mut EbpfVm<C>, u64, u64, u64, u64, u64);
/// Re-export of the real JIT compiler on targets where it is available
/// (`jit` feature, x86_64, non-Windows).
#[cfg(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64"))]
pub type JitCompiler<'a, C> = crate::jit::JitCompiler<'a, C>;
/// Stand-in for the JIT compiler on targets without JIT support, so that
/// `BuiltinCodegen` keeps the same signature on every target.
#[cfg(not(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64")))]
pub struct JitCompiler<'a, C> {
    // Carries the lifetime and context-object parameters without storage.
    _phantom: std::marker::PhantomData<&'a C>,
}
#[cfg(not(all(feature = "jit", not(target_os = "windows"), target_arch = "x86_64")))]
impl<'a, C: ContextObject> JitCompiler<'a, C> {
    /// No-op counterpart of the real compiler's `emit_external_call`.
    #[allow(dead_code)]
    pub fn emit_external_call(&mut self, _function: BuiltinFunction<C>) {}
}
/// Code-generation hook of a builtin function, driven by the JIT.
pub type BuiltinCodegen<C> = fn(&mut JitCompiler<C>);
246
/// A collection of builtin functions, together with the VM configuration
/// when this program acts as a loader.
#[derive(Eq)]
pub struct BuiltinProgram<C: ContextObject> {
    /// `Some` when constructed via `new_loader` / `new_mock`,
    /// `None` when constructed via `new_builtin`.
    config: Option<Box<Config>>,
    /// Maps symbol-name hashes to (interpreter hook, JIT hook) pairs.
    sparse_registry: FunctionRegistry<(BuiltinFunction<C>, BuiltinCodegen<C>)>,
}
255
256impl<C: ContextObject> PartialEq for BuiltinProgram<C> {
257 fn eq(&self, other: &Self) -> bool {
258 self.config.eq(&other.config) && self.sparse_registry.eq(&other.sparse_registry)
259 }
260}
261
262impl<C: ContextObject> BuiltinProgram<C> {
263 pub fn new_loader(config: Config) -> Self {
265 Self {
266 config: Some(Box::new(config)),
267 sparse_registry: FunctionRegistry::default(),
268 }
269 }
270
271 pub fn new_builtin() -> Self {
273 Self {
274 config: None,
275 sparse_registry: FunctionRegistry::default(),
276 }
277 }
278
279 pub fn new_mock() -> Self {
281 Self {
282 config: Some(Box::default()),
283 sparse_registry: FunctionRegistry::default(),
284 }
285 }
286
287 pub fn get_config(&self) -> &Config {
289 self.config.as_ref().unwrap()
290 }
291
292 pub fn get_function_registry(
294 &self,
295 ) -> &FunctionRegistry<(BuiltinFunction<C>, BuiltinCodegen<C>)> {
296 &self.sparse_registry
297 }
298
299 pub fn mem_size(&self) -> usize {
301 std::mem::size_of::<Self>()
302 .saturating_add(if self.config.is_some() {
303 std::mem::size_of::<Config>()
304 } else {
305 0
306 })
307 .saturating_add(self.sparse_registry.mem_size())
308 }
309
310 pub fn register_function(
314 &mut self,
315 name: &str,
316 entry: (BuiltinFunction<C>, BuiltinCodegen<C>),
317 ) -> Result<(), ElfError> {
318 let key = ebpf::hash_symbol_name(name.as_bytes());
319 self.sparse_registry
320 .register_function(key, name, entry)
321 .map(|_| ())
322 }
323
324 pub fn register_definition<BFD: BuiltinFunctionDefinition<C>>(
326 &mut self,
327 name: &str,
328 ) -> Result<(), ElfError> {
329 self.register_function(name, (BFD::vm, BFD::codegen))
330 }
331}
332
/// Describes a builtin function: a safe Rust implementation (`rust`) plus
/// default-provided glue for the interpreter (`vm`) and the JIT (`codegen`).
pub trait BuiltinFunctionDefinition<C>
where
    C: crate::vm::ContextObject,
{
    /// Error type of the `rust` implementation; converted into
    /// `EbpfError::SyscallError` by the `vm` adapter.
    type Error: Into<Box<dyn core::error::Error>>;

    /// The actual implementation, operating on safe references.
    ///
    /// Receives the context object, the five u64 argument registers and
    /// the VM's memory mapping; returns the result register value or an
    /// error.
    fn rust(
        vm: &mut C,
        arg_a: u64,
        arg_b: u64,
        arg_c: u64,
        arg_d: u64,
        arg_e: u64,
        memory_mapping: &mut MemoryMapping,
    ) -> Result<u64, Self::Error>;

    /// Raw adapter called by the interpreter / JIT: recovers the `EbpfVm`,
    /// charges the instruction meter, runs `rust`, stores the result in
    /// `vm.program_result` and resynchronizes the meter.
    #[expect(clippy::arithmetic_side_effects)]
    fn vm(vm: *mut crate::vm::EbpfVm<C>, a: u64, b: u64, c: u64, d: u64, e: u64) {
        // SAFETY: the pointer is offset by the runtime environment key to
        // recover the real `EbpfVm` address. NOTE(review): soundness
        // depends on every caller passing the obfuscated pointer produced
        // by the VM — confirm at the call sites.
        let vm = unsafe {
            &mut *(vm
                .cast::<u64>()
                .offset(-(crate::vm::get_runtime_environment_key() as isize))
                .cast::<crate::vm::EbpfVm<C>>())
        };
        let config = vm.loader.get_config();
        if config.enable_instruction_meter {
            // Charge the instructions executed since the last sync point.
            vm.context_object_pointer
                .consume(vm.previous_instruction_meter - vm.due_insn_count);
        }
        let converted_result: crate::error::ProgramResult = Self::rust(
            vm.context_object_pointer,
            a,
            b,
            c,
            d,
            e,
            &mut vm.memory_mapping,
        )
        .map_err(|err| crate::error::EbpfError::SyscallError(err.into()))
        .into();
        vm.program_result = converted_result;
        if config.enable_instruction_meter {
            // The builtin may itself consume compute units; resync the
            // meter snapshot from the context object.
            vm.previous_instruction_meter = vm.context_object_pointer.get_remaining();
        }
    }

    /// JIT hook: emits a call to the `vm` adapter above.
    fn codegen(jit: &mut JitCompiler<C>) {
        jit.emit_external_call(Self::vm);
    }

    /// Convenience wrapper around `BuiltinProgram::register_definition`.
    fn register(program: &mut BuiltinProgram<C>, name: &str) -> Result<(), ElfError>
    where
        Self: Sized,
    {
        program.register_definition::<Self>(name)
    }
}
401
impl<C: ContextObject> std::fmt::Debug for BuiltinProgram<C> {
    /// Prints the registry with its function pointers rendered as integers.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        f.debug_struct("BuiltinProgram")
            .field("registry", unsafe {
                // SAFETY: reinterprets the registry only for printing, so
                // the fn pointers show up as plain integers. NOTE(review):
                // this relies on the two generic instantiations (tuples of
                // two pointer-sized fields) having identical layout, which
                // `repr(Rust)` does not formally guarantee — confirm.
                std::mem::transmute::<
                    &FunctionRegistry<(BuiltinFunction<C>, BuiltinCodegen<C>)>,
                    &FunctionRegistry<(usize, usize)>,
                >(&self.sparse_registry)
            })
            .finish()
    }
}
414
/// Declares a unit struct `$name` and implements
/// `BuiltinFunctionDefinition` for it from the given `fn rust(...)` body.
/// An optional `fn codegen(...)` overrides the default JIT hook.
#[macro_export]
macro_rules! declare_builtin_function {
    (
        $(#[$attr:meta])*
        $name:ident $(<$($generic_ident:tt : $generic_type:tt),+>)?,
        fn rust(
            $vm:ident : &mut $ContextObject:ty,
            $arg_a:ident : u64,
            $arg_b:ident : u64,
            $arg_c:ident : u64,
            $arg_d:ident : u64,
            $arg_e:ident : u64,
            $memory_mapping:ident : &mut $MemoryMapping:ty,
        ) -> Result<$Ok:ty, $Err:ty> {
            $($rust:tt)*
        }
        $(fn codegen(
            $jit:ident : &mut $crate::program::JitCompiler<$ContextObject2:ty>,
        ) {
            $($codegen:tt)*
        })?
    ) => {
        $(#[$attr])*
        // PhantomData carries the generic parameters (if any) without
        // adding any stored data.
        pub struct $name $(<$($generic_ident),+>)? (
            $(std::marker::PhantomData<($($generic_ident,)+)>)?
        );
        impl $(<$($generic_ident : $generic_type),+>)?
            $crate::program::BuiltinFunctionDefinition<$ContextObject> for
            $name $(<$($generic_ident),+>)?
        {
            type Error = $Err;
            fn rust(
                $vm: &mut $ContextObject,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
                $memory_mapping: &mut $MemoryMapping,
            ) -> core::result::Result<$Ok, $Err> {
                $($rust)*
            }
            $(fn codegen(
                $jit: &mut $crate::program::JitCompiler<$ContextObject2>,
            ) {
                $($codegen)*
            })?
        }
    };
}