//! Common interface for built-in and user supplied programs
use {
    crate::{
        ebpf,
        elf::ElfError,
        vm::{Config, ContextObject, EbpfVm},
    },
    std::collections::{btree_map::Entry, BTreeMap},
};

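/// The SBPF instruction set versions, ordered oldest to newest.
///
/// `PartialOrd` is derived from declaration order, so each feature gate
/// below is a simple `>=` comparison against the version that introduced
/// the feature. A minimal sketch of the intended use (the import path is
/// an assumption; it presumes the crate is used as `solana_sbpf`):
///
/// ```ignore
/// use solana_sbpf::program::SBPFVersion;
///
/// assert!(SBPFVersion::V2 >= SBPFVersion::V1);
/// assert!(SBPFVersion::V2.enable_pqr());
/// assert!(!SBPFVersion::V1.enable_pqr());
/// ```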
#[derive(Debug, PartialEq, PartialOrd, Eq, Clone, Copy)]
pub enum SBPFVersion {
    /// The legacy format
    V0,
    /// Adds dynamic stack frames
    V1,
    /// Reworks the instruction set (PQR instructions, explicit sign
    /// extension, new memory instruction classes)
    V2,
    /// Adds static syscalls and stricter verification
    V3,
    /// Newer than V3; no additional feature gates are defined in this module
    V4,
    /// Marks the first unsupported version
    Reserved,
}

impl SBPFVersion {
    /// Enable dynamically sized stack frames instead of fixed-size frames
    pub fn dynamic_stack_frames(self) -> bool {
        self >= SBPFVersion::V1
    }

    /// Enable the product / quotient / remainder (PQR) instruction class
    pub fn enable_pqr(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Require sign extension of results to be explicit
    pub fn explicit_sign_extension_of_results(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Swap the operands of `sub reg, imm` so that it computes `imm - reg`
    pub fn swap_sub_reg_imm_operands(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Disable the `neg` instruction
    pub fn disable_neg(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// `callx` takes its target from the source register instead of the
    /// immediate field
    pub fn callx_uses_src_reg(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Disable the `lddw` (load 64-bit immediate) instruction
    pub fn disable_lddw(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Disable the `le` (little-endian byte swap) instruction
    pub fn disable_le(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Move the memory instructions into other instruction classes
    pub fn move_memory_instruction_classes(self) -> bool {
        self >= SBPFVersion::V2
    }

    /// Resolve syscalls statically at verification time
    pub fn static_syscalls(self) -> bool {
        self >= SBPFVersion::V3
    }

    /// Enable stricter bytecode verification
    pub fn enable_stricter_verification(self) -> bool {
        self >= SBPFVersion::V3
    }

    /// Enable stricter ELF header validation
    pub fn enable_stricter_elf_headers(self) -> bool {
        self >= SBPFVersion::V3
    }

    /// Place the bytecode at a lower virtual address
    pub fn enable_lower_bytecode_vaddr(self) -> bool {
        self >= SBPFVersion::V3
    }

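    /// Computes the target pc of a `call imm` instruction.
    ///
    /// With static syscalls (V3 and later) the immediate is an offset
    /// relative to the instruction following the call; before that it is
    /// the absolute target pc. A worked example (hypothetical values, and
    /// the import path assumes the crate is used as `solana_sbpf`):
    ///
    /// ```ignore
    /// use solana_sbpf::program::SBPFVersion;
    ///
    /// // Relative: a `call +5` at pc 10 lands at 10 + 5 + 1 = 16.
    /// assert_eq!(SBPFVersion::V3.calculate_call_imm_target_pc(10, 5), 16);
    /// // Absolute: the immediate is the target pc itself.
    /// assert_eq!(SBPFVersion::V0.calculate_call_imm_target_pc(10, 5), 5);
    /// ```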
    pub fn calculate_call_imm_target_pc(self, pc: usize, imm: i64) -> u32 {
        if self.static_syscalls() {
            (pc as i64).saturating_add(imm).saturating_add(1) as u32
        } else {
            imm as u32
        }
    }
}

/// A registry of functions, mapping a key (name hash or target pc) to the
/// function name and a payload
#[derive(Debug, PartialEq, Eq)]
pub struct FunctionRegistry<T> {
    pub(crate) map: BTreeMap<u32, (Vec<u8>, T)>,
}

impl<T> Default for FunctionRegistry<T> {
    fn default() -> Self {
        Self {
            map: BTreeMap::new(),
        }
    }
}

impl<T: Copy + PartialEq> FunctionRegistry<T> {
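    /// Registers a function under `key`.
    ///
    /// Re-registering the same key is accepted only if the payload is
    /// identical; otherwise it is reported as a hash collision. A minimal
    /// usage sketch (hypothetical key and name; the import path assumes
    /// the crate is used as `solana_sbpf`):
    ///
    /// ```ignore
    /// use solana_sbpf::program::FunctionRegistry;
    ///
    /// let mut registry = FunctionRegistry::<usize>::default();
    /// registry.register_function(8, *b"function", 8).unwrap();
    /// // Same key and payload: accepted, the existing entry is kept.
    /// registry.register_function(8, *b"function", 8).unwrap();
    /// // Same key, different payload: rejected as a collision.
    /// assert!(registry.register_function(8, *b"other", 9).is_err());
    /// ```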
    pub fn register_function(
        &mut self,
        key: u32,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<(), ElfError> {
        match self.map.entry(key) {
            Entry::Vacant(entry) => {
                entry.insert((name.into(), value));
            }
            Entry::Occupied(entry) => {
                if entry.get().1 != value {
                    return Err(ElfError::SymbolHashCollision(key));
                }
            }
        }
        Ok(())
    }

    /// Registers a function according to the hashing rules of the legacy
    /// ELF formats.
    ///
    /// If `hash_symbol_name` is true the key is a symbol hash: the hash of
    /// the name for the entrypoint, and the hash of the target pc for all
    /// other functions; collisions with the loader's registry are rejected.
    /// Otherwise the target pc itself is used as the key. The name is only
    /// stored for the entrypoint or when symbol labels are enabled.
    pub(crate) fn register_function_hashed_legacy<C: ContextObject>(
        &mut self,
        loader: &BuiltinProgram<C>,
        hash_symbol_name: bool,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<u32, ElfError>
    where
        usize: From<T>,
    {
        let name = name.into();
        let config = loader.get_config();
        let key = if hash_symbol_name {
            let hash = if name == b"entrypoint" {
                ebpf::hash_symbol_name(b"entrypoint")
            } else {
                ebpf::hash_symbol_name(&usize::from(value).to_le_bytes())
            };
            if loader.get_function_registry().lookup_by_key(hash).is_some() {
                return Err(ElfError::SymbolHashCollision(hash));
            }
            hash
        } else {
            usize::from(value) as u32
        };
        self.register_function(
            key,
            if config.enable_symbol_and_section_labels || name == b"entrypoint" {
                name
            } else {
                Vec::default()
            },
            value,
        )?;
        Ok(key)
    }

    /// Unregisters the function stored under `key`
    pub fn unregister_function(&mut self, key: u32) {
        self.map.remove(&key);
    }

    /// Iterates over all keys
    pub fn keys(&self) -> impl Iterator<Item = u32> + '_ {
        self.map.keys().copied()
    }

    /// Iterates over all entries as `(key, (name, payload))`
    pub fn iter(&self) -> impl Iterator<Item = (u32, (&[u8], T))> + '_ {
        self.map
            .iter()
            .map(|(key, (name, value))| (*key, (name.as_slice(), *value)))
    }

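    /// Looks up an entry by key.
    ///
    /// A minimal lookup sketch, continuing the `register_function` example
    /// above (hypothetical key and name; the import path assumes the crate
    /// is used as `solana_sbpf`):
    ///
    /// ```ignore
    /// use solana_sbpf::program::FunctionRegistry;
    ///
    /// let mut registry = FunctionRegistry::<usize>::default();
    /// registry.register_function(8, *b"function", 8).unwrap();
    /// assert_eq!(registry.lookup_by_key(8), Some((b"function".as_slice(), 8)));
    /// assert!(registry.lookup_by_name(b"missing").is_none());
    /// ```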
    pub fn lookup_by_key(&self, key: u32) -> Option<(&[u8], T)> {
        self.map
            .get(&key)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

    /// Looks up an entry by name (a linear scan over all entries)
    pub fn lookup_by_name(&self, name: &[u8]) -> Option<(&[u8], T)> {
        self.map
            .values()
            .find(|(function_name, _value)| function_name == name)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

    /// Returns the approximate memory footprint of this registry in bytes
    pub fn mem_size(&self) -> usize {
        std::mem::size_of::<Self>().saturating_add(self.map.iter().fold(
            0,
            |state: usize, (_, (name, value))| {
                state.saturating_add(
                    std::mem::size_of_val(value).saturating_add(
                        std::mem::size_of_val(name).saturating_add(name.capacity()),
                    ),
                )
            },
        ))
    }
}

/// The signature of a builtin function called from the VM
pub type BuiltinFunction<C> = fn(*mut EbpfVm<C>, u64, u64, u64, u64, u64);

/// A collection of builtin functions, and the `Config` when used as a loader
#[derive(Eq)]
pub struct BuiltinProgram<C: ContextObject> {
    /// Holds the `Config` if this is a loader program
    config: Option<Box<Config>>,
    /// Functions are registered under the hash of their name
    sparse_registry: FunctionRegistry<BuiltinFunction<C>>,
}

impl<C: ContextObject> PartialEq for BuiltinProgram<C> {
    fn eq(&self, other: &Self) -> bool {
        self.config.eq(&other.config) && self.sparse_registry.eq(&other.sparse_registry)
    }
}

impl<C: ContextObject> BuiltinProgram<C> {
    /// Constructs a loader program, which owns a `Config`
    pub fn new_loader(config: Config) -> Self {
        Self {
            config: Some(Box::new(config)),
            sparse_registry: FunctionRegistry::default(),
        }
    }

    /// Constructs a builtin program, which carries no `Config`
    pub fn new_builtin() -> Self {
        Self {
            config: None,
            sparse_registry: FunctionRegistry::default(),
        }
    }

    /// Constructs a mock loader with a default `Config`, intended for tests
    pub fn new_mock() -> Self {
        Self {
            config: Some(Box::default()),
            sparse_registry: FunctionRegistry::default(),
        }
    }

    /// Returns the `Config`.
    ///
    /// # Panics
    ///
    /// Panics if called on a builtin program constructed via
    /// [`Self::new_builtin`], which has no `Config`.
    pub fn get_config(&self) -> &Config {
        self.config.as_ref().unwrap()
    }

    /// Returns the function registry
    pub fn get_function_registry(&self) -> &FunctionRegistry<BuiltinFunction<C>> {
        &self.sparse_registry
    }

    /// Returns the approximate memory footprint of this program in bytes
    pub fn mem_size(&self) -> usize {
        std::mem::size_of::<Self>()
            .saturating_add(if self.config.is_some() {
                std::mem::size_of::<Config>()
            } else {
                0
            })
            .saturating_add(self.sparse_registry.mem_size())
    }

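    /// Registers a builtin function under the hash of its name.
    ///
    /// A minimal registration sketch; `SyscallSum` stands in for any type
    /// declared with `declare_builtin_function!` below, and the import
    /// paths and `TestContextObject` are assumptions (they presume the
    /// crate is used as `solana_sbpf`):
    ///
    /// ```ignore
    /// use solana_sbpf::{program::BuiltinProgram, vm::{Config, TestContextObject}};
    ///
    /// let mut loader = BuiltinProgram::<TestContextObject>::new_loader(Config::default());
    /// loader.register_function("sum", SyscallSum::vm).unwrap();
    /// ```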
    pub fn register_function(
        &mut self,
        name: &str,
        value: BuiltinFunction<C>,
    ) -> Result<(), ElfError> {
        let key = ebpf::hash_symbol_name(name.as_bytes());
        self.sparse_registry
            .register_function(key, name, value)
            .map(|_| ())
    }
}

impl<C: ContextObject> std::fmt::Debug for BuiltinProgram<C> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        unsafe {
            // Reinterpret the function pointers as `usize` so the registry
            // payloads can be printed as plain addresses.
            writeln!(
                f,
                "registry: {:?}",
                std::mem::transmute::<
                    &FunctionRegistry<BuiltinFunction<C>>,
                    &FunctionRegistry<usize>,
                >(&self.sparse_registry),
            )?;
        }
        Ok(())
    }
}

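/// Declares a builtin function, generating both a `rust` method that can be
/// called directly and a `vm` method matching the [`BuiltinFunction`] ABI.
///
/// A minimal usage sketch. The import paths and `TestContextObject` are
/// assumptions (they presume the crate is used as `solana_sbpf`); the macro
/// itself only requires the argument list shown here:
///
/// ```ignore
/// use solana_sbpf::{
///     declare_builtin_function,
///     memory_region::MemoryMapping,
///     vm::TestContextObject,
/// };
///
/// declare_builtin_function!(
///     /// Sums its five arguments
///     SyscallSum,
///     fn rust(
///         _context_object: &mut TestContextObject,
///         arg_a: u64,
///         arg_b: u64,
///         arg_c: u64,
///         arg_d: u64,
///         arg_e: u64,
///         _memory_mapping: &mut MemoryMapping,
///     ) -> Result<u64, Box<dyn std::error::Error>> {
///         Ok(arg_a
///             .wrapping_add(arg_b)
///             .wrapping_add(arg_c)
///             .wrapping_add(arg_d)
///             .wrapping_add(arg_e))
///     }
/// );
/// ```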
#[macro_export]
macro_rules! declare_builtin_function {
    ($(#[$attr:meta])* $name:ident $(<$($generic_ident:tt : $generic_type:tt),+>)?, fn rust(
        $vm:ident : &mut $ContextObject:ty,
        $arg_a:ident : u64,
        $arg_b:ident : u64,
        $arg_c:ident : u64,
        $arg_d:ident : u64,
        $arg_e:ident : u64,
        $memory_mapping:ident : &mut $MemoryMapping:ty,
    ) -> $Result:ty { $($rust:tt)* }) => {
        $(#[$attr])*
        pub struct $name {}
        impl $name {
            /// Rust interface
            pub fn rust $(<$($generic_ident : $generic_type),+>)? (
                $vm: &mut $ContextObject,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
                $memory_mapping: &mut $MemoryMapping,
            ) -> $Result {
                $($rust)*
            }
            /// VM interface
            #[allow(clippy::too_many_arguments)]
            pub fn vm $(<$($generic_ident : $generic_type),+>)? (
                $vm: *mut $crate::vm::EbpfVm<$ContextObject>,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
            ) {
                use $crate::vm::ContextObject;
                // Recover the &mut EbpfVm from the offset runtime environment pointer.
                let vm = unsafe {
                    &mut *($vm.cast::<u64>().offset(-($crate::vm::get_runtime_environment_key() as isize)).cast::<$crate::vm::EbpfVm<$ContextObject>>())
                };
                let config = vm.loader.get_config();
                if config.enable_instruction_meter {
                    // Charge the instructions executed since the meter was last synchronized.
                    vm.context_object_pointer.consume(vm.previous_instruction_meter - vm.due_insn_count);
                }
                let converted_result: $crate::error::ProgramResult = Self::rust $(::<$($generic_ident),+>)?(
                    vm.context_object_pointer, $arg_a, $arg_b, $arg_c, $arg_d, $arg_e, &mut vm.memory_mapping,
                ).map_err(|err| $crate::error::EbpfError::SyscallError(err)).into();
                vm.program_result = converted_result;
                if config.enable_instruction_meter {
                    // Resynchronize the meter with what the context object now reports.
                    vm.previous_instruction_meter = vm.context_object_pointer.get_remaining();
                }
            }
        }
    };
}