//! Common interface for built-in and user supplied programs
use {
    crate::{
        ebpf,
        elf::ElfError,
        vm::{Config, ContextObject, EbpfVm},
    },
    std::collections::{btree_map::Entry, BTreeMap},
};

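/// The SBPF instruction set version of an executable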
#[derive(Debug, PartialEq, PartialOrd, Eq, Clone, Copy)]
pub enum SBPFVersion {
    /// The legacy format
    V0,
    /// Adds dynamic stack frames
    V1,
    /// Instruction set modernization (PQR instructions, explicit sign extension, etc.)
    V2,
    /// Static syscalls and stricter ELF headers
    V3,
    /// Used to test future versions
    Reserved,
}

impl SBPFVersion {
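    /// Enables dynamic stack frames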
    pub fn dynamic_stack_frames(self) -> bool {
        self >= SBPFVersion::V1
    }

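    /// Enables the PQR (product / quotient / remainder) instruction class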
    pub fn enable_pqr(self) -> bool {
        self >= SBPFVersion::V2
    }

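    /// Makes the sign extension of ALU results explicit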
    pub fn explicit_sign_extension_of_results(self) -> bool {
        self >= SBPFVersion::V2
    }

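    /// Swaps the operands of subtraction with an immediate, computing imm - reg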
    pub fn swap_sub_reg_imm_operands(self) -> bool {
        self >= SBPFVersion::V2
    }

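    /// Disables the neg (negation) instruction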
    pub fn disable_neg(self) -> bool {
        self >= SBPFVersion::V2
    }

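    /// Encodes the target of callx in the source register instead of the immediate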
    pub fn callx_uses_src_reg(self) -> bool {
        self >= SBPFVersion::V2
    }

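    /// Disables lddw, the only instruction occupying two slots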
    pub fn disable_lddw(self) -> bool {
        self >= SBPFVersion::V2
    }

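    /// Disables the le (little-endian byte swap) instruction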
    pub fn disable_le(self) -> bool {
        self >= SBPFVersion::V2
    }

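    /// Moves the memory instructions into other instruction classes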
    pub fn move_memory_instruction_classes(self) -> bool {
        self >= SBPFVersion::V2
    }

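    /// Makes syscalls static and distinct from internal calls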
    pub fn static_syscalls(self) -> bool {
        self >= SBPFVersion::V3
    }

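    /// Enforces stricter validation of ELF headers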
    pub fn enable_stricter_elf_headers(self) -> bool {
        self >= SBPFVersion::V3
    }

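    /// Places the bytecode at a lower virtual address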
    pub fn enable_lower_bytecode_vaddr(self) -> bool {
        self >= SBPFVersion::V3
    }

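    /// Rejects programs whose read-only data would overlap the stack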
    pub fn reject_rodata_stack_overlap(self) -> bool {
        self != SBPFVersion::V0
    }

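    /// Uses the virtual addresses given in the ELF file for section placement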
    pub fn enable_elf_vaddr(self) -> bool {
        self != SBPFVersion::V0
    }

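    /// Calculates the target pc of a call: relative to the next instruction when
    /// syscalls are static, otherwise taken directly from the immediate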
    pub fn calculate_call_imm_target_pc(self, pc: usize, imm: i64) -> u32 {
        if self.static_syscalls() {
            // The immediate is an offset relative to the instruction following the call
            (pc as i64).saturating_add(imm).saturating_add(1) as u32
        } else {
            // The immediate encodes the target directly
            imm as u32
        }
    }
}

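/// Holds the function symbols of an executable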
#[derive(Debug, PartialEq, Eq)]
pub struct FunctionRegistry<T> {
    pub(crate) map: BTreeMap<u32, (Vec<u8>, T)>,
}

impl<T> Default for FunctionRegistry<T> {
    fn default() -> Self {
        Self {
            map: BTreeMap::new(),
        }
    }
}

impl<T: Copy + PartialEq> FunctionRegistry<T> {
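    /// Registers a function under the given key; re-registering the same value
    /// is a no-op, while a different value is reported as a hash collision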
    pub fn register_function(
        &mut self,
        key: u32,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<(), ElfError> {
        match self.map.entry(key) {
            Entry::Vacant(entry) => {
                entry.insert((name.into(), value));
            }
            Entry::Occupied(entry) => {
                // The same key may only ever map to one value
                if entry.get().1 != value {
                    return Err(ElfError::SymbolHashCollision(key));
                }
            }
        }
        Ok(())
    }

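    /// Registers a function with a key derived from its name hash or its value
    /// (legacy calling convention)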
    pub(crate) fn register_function_hashed_legacy<C: ContextObject>(
        &mut self,
        loader: &BuiltinProgram<C>,
        hash_symbol_name: bool,
        name: impl Into<Vec<u8>>,
        value: T,
    ) -> Result<u32, ElfError>
    where
        usize: From<T>,
    {
        let name = name.into();
        let config = loader.get_config();
        let key = if hash_symbol_name {
            let hash = if name == b"entrypoint" {
                ebpf::hash_symbol_name(b"entrypoint")
            } else {
                ebpf::hash_symbol_name(&usize::from(value).to_le_bytes())
            };
            // Reject hashes which collide with a function of the loader (e.g. a syscall)
            if loader.get_function_registry().lookup_by_key(hash).is_some() {
                return Err(ElfError::SymbolHashCollision(hash));
            }
            hash
        } else {
            usize::from(value) as u32
        };
        self.register_function(
            key,
            if config.enable_symbol_and_section_labels || name == b"entrypoint" {
                name
            } else {
                Vec::default()
            },
            value,
        )?;
        Ok(key)
    }

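    /// Removes a function from the registry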
    pub fn unregister_function(&mut self, key: u32) {
        self.map.remove(&key);
    }

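    /// Iterates over all registered keys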
    pub fn keys(&self) -> impl Iterator<Item = u32> + '_ {
        self.map.keys().copied()
    }

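    /// Iterates over all entries as (key, (name, value)) tuples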
    pub fn iter(&self) -> impl Iterator<Item = (u32, (&[u8], T))> + '_ {
        self.map
            .iter()
            .map(|(key, (name, value))| (*key, (name.as_slice(), *value)))
    }

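    /// Looks up a function by its key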
    pub fn lookup_by_key(&self, key: u32) -> Option<(&[u8], T)> {
        self.map
            .get(&key)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

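    /// Looks up a function by its name (linear scan)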
    pub fn lookup_by_name(&self, name: &[u8]) -> Option<(&[u8], T)> {
        self.map
            .values()
            .find(|(function_name, _value)| function_name == name)
            .map(|(function_name, value)| (function_name.as_slice(), *value))
    }

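    /// Calculates the total memory footprint, including the heap allocations
    /// of the symbol names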
    pub fn mem_size(&self) -> usize {
        std::mem::size_of::<Self>().saturating_add(self.map.iter().fold(
            0,
            |state: usize, (_, (name, value))| {
                state.saturating_add(
                    std::mem::size_of_val(value).saturating_add(
                        std::mem::size_of_val(name).saturating_add(name.capacity()),
                    ),
                )
            },
        ))
    }
}

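/// Signature of a built-in function, e.g. a syscall handler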
pub type BuiltinFunction<C> = fn(*mut EbpfVm<C>, u64, u64, u64, u64, u64);

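/// A registry of built-in functions, optionally carrying a loader Config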
#[derive(Eq)]
pub struct BuiltinProgram<C: ContextObject> {
    /// Holds the Config if this is a loader program
    config: Option<Box<Config>>,
    /// Function pointers by symbol
    sparse_registry: FunctionRegistry<BuiltinFunction<C>>,
}

impl<C: ContextObject> PartialEq for BuiltinProgram<C> {
    fn eq(&self, other: &Self) -> bool {
        self.config.eq(&other.config) && self.sparse_registry.eq(&other.sparse_registry)
    }
}

impl<C: ContextObject> BuiltinProgram<C> {
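    /// Constructs a loader program with the given config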
    pub fn new_loader(config: Config) -> Self {
        Self {
            config: Some(Box::new(config)),
            sparse_registry: FunctionRegistry::default(),
        }
    }

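    /// Constructs a built-in program (carries no config)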
    pub fn new_builtin() -> Self {
        Self {
            config: None,
            sparse_registry: FunctionRegistry::default(),
        }
    }

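    /// Constructs a mock loader with a default config, intended for tests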
    pub fn new_mock() -> Self {
        Self {
            config: Some(Box::default()),
            sparse_registry: FunctionRegistry::default(),
        }
    }

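    /// Returns the config; panics if this program was not constructed as a loader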
    pub fn get_config(&self) -> &Config {
        self.config.as_ref().unwrap()
    }

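    /// Returns the function registry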
    pub fn get_function_registry(&self) -> &FunctionRegistry<BuiltinFunction<C>> {
        &self.sparse_registry
    }

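    /// Calculates the total memory footprint of this object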
    pub fn mem_size(&self) -> usize {
        std::mem::size_of::<Self>()
            .saturating_add(if self.config.is_some() {
                std::mem::size_of::<Config>()
            } else {
                0
            })
            .saturating_add(self.sparse_registry.mem_size())
    }

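    /// Registers a built-in function under the hash of its name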
    pub fn register_function(
        &mut self,
        name: &str,
        value: BuiltinFunction<C>,
    ) -> Result<(), ElfError> {
        let key = ebpf::hash_symbol_name(name.as_bytes());
        self.sparse_registry
            .register_function(key, name, value)
            .map(|_| ())
    }
}

impl<C: ContextObject> std::fmt::Debug for BuiltinProgram<C> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        unsafe {
            // Function pointers do not implement Debug, so erase their type
            // and print the registry as if it contained plain addresses
            writeln!(
                f,
                "registry: {:?}",
                std::mem::transmute::<
                    &FunctionRegistry<BuiltinFunction<C>>,
                    &FunctionRegistry<usize>,
                >(&self.sparse_registry),
            )?;
        }
        Ok(())
    }
}

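/// Generates an adapter for a BuiltinFunction between the Rust and the VM calling convention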
#[macro_export]
macro_rules! declare_builtin_function {
    ($(#[$attr:meta])* $name:ident $(<$($generic_ident:tt : $generic_type:tt),+>)?, fn rust(
        $vm:ident : &mut $ContextObject:ty,
        $arg_a:ident : u64,
        $arg_b:ident : u64,
        $arg_c:ident : u64,
        $arg_d:ident : u64,
        $arg_e:ident : u64,
        $memory_mapping:ident : &mut $MemoryMapping:ty,
    ) -> $Result:ty { $($rust:tt)* }) => {
        $(#[$attr])*
        pub struct $name {}
        impl $name {
            /// Rust interface
            pub fn rust $(<$($generic_ident : $generic_type),+>)? (
                $vm: &mut $ContextObject,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
                $memory_mapping: &mut $MemoryMapping,
            ) -> $Result {
                $($rust)*
            }
            /// VM interface
            #[allow(clippy::too_many_arguments)]
            pub fn vm $(<$($generic_ident : $generic_type),+>)? (
                $vm: *mut $crate::vm::EbpfVm<$ContextObject>,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
            ) {
                use $crate::vm::ContextObject;
                // Reconstruct the &mut EbpfVm from the runtime environment pointer
                let vm = unsafe {
                    &mut *($vm.cast::<u64>().offset(-($crate::vm::get_runtime_environment_key() as isize)).cast::<$crate::vm::EbpfVm<$ContextObject>>())
                };
                let config = vm.loader.get_config();
                if config.enable_instruction_meter {
                    // Charge the instructions executed since the last update to the meter
                    vm.context_object_pointer.consume(vm.previous_instruction_meter - vm.due_insn_count);
                }
                let converted_result: $crate::error::ProgramResult = Self::rust $(::<$($generic_ident),+>)?(
                    vm.context_object_pointer, $arg_a, $arg_b, $arg_c, $arg_d, $arg_e, &mut vm.memory_mapping,
                ).map_err(|err| $crate::error::EbpfError::SyscallError(err)).into();
                vm.program_result = converted_result;
                if config.enable_instruction_meter {
                    vm.previous_instruction_meter = vm.context_object_pointer.get_remaining();
                }
            }
        }
    };
}