1use {
3 crate::{
4 ebpf,
5 elf::ElfError,
6 vm::{Config, ContextObject, EbpfVm},
7 },
8 std::collections::{btree_map::Entry, BTreeMap},
9};
10
/// The executable file format versions of SBPF bytecode.
///
/// Ordering is significant: the feature gates in `impl SBPFVersion` compare
/// versions with `>=`, so variants must stay declared oldest-to-newest.
/// `Ord` is derived alongside `PartialOrd`/`Eq` so the type carries a proper
/// total order (and works with `min`/`max`, sorted collections, etc.).
#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone, Copy)]
pub enum SBPFVersion {
    /// The original version (lowest; every gate below is off for it)
    V0,
    /// Gates `dynamic_stack_frames`
    V1,
    /// Gates the instruction-set changes (`enable_pqr`, `disable_lddw`, …)
    V2,
    /// Gates `static_syscalls` and the stricter verification/ELF checks
    V3,
    /// Newest defined version — no gate in this file references it yet
    V4,
    /// Sentinel past the last real version; not a valid version itself
    Reserved,
}
27
28impl SBPFVersion {
29 pub fn dynamic_stack_frames(self) -> bool {
31 self >= SBPFVersion::V1
32 }
33
34 pub fn enable_pqr(self) -> bool {
36 self >= SBPFVersion::V2
37 }
38 pub fn explicit_sign_extension_of_results(self) -> bool {
40 self >= SBPFVersion::V2
41 }
42 pub fn swap_sub_reg_imm_operands(self) -> bool {
44 self >= SBPFVersion::V2
45 }
46 pub fn disable_neg(self) -> bool {
48 self >= SBPFVersion::V2
49 }
50
51 pub fn callx_uses_src_reg(self) -> bool {
53 self >= SBPFVersion::V2
54 }
55 pub fn disable_lddw(self) -> bool {
57 self >= SBPFVersion::V2
58 }
59 pub fn disable_le(self) -> bool {
61 self >= SBPFVersion::V2
62 }
63 pub fn move_memory_instruction_classes(self) -> bool {
65 self >= SBPFVersion::V2
66 }
67
68 pub fn static_syscalls(self) -> bool {
70 self >= SBPFVersion::V3
71 }
72 pub fn enable_stricter_verification(self) -> bool {
74 self >= SBPFVersion::V3
75 }
76 pub fn enable_stricter_elf_headers(self) -> bool {
78 self >= SBPFVersion::V3
79 }
80 pub fn enable_lower_bytecode_vaddr(self) -> bool {
82 self >= SBPFVersion::V3
83 }
84
85 pub fn reject_rodata_stack_overlap(self) -> bool {
88 self != SBPFVersion::V0
89 }
90
91 pub fn enable_elf_vaddr(self) -> bool {
93 self != SBPFVersion::V0
94 }
95
96 pub fn calculate_call_imm_target_pc(self, pc: usize, imm: i64) -> u32 {
99 if self.static_syscalls() {
100 (pc as i64).saturating_add(imm).saturating_add(1) as u32
101 } else {
102 imm as u32
103 }
104 }
105}
106
/// An ordered registry of functions: maps a `u32` key to a symbol name and a
/// value of type `T` (e.g. a target pc or a builtin function pointer).
#[derive(Debug, PartialEq, Eq)]
pub struct FunctionRegistry<T> {
    // Backing storage: key -> (symbol name bytes, value). BTreeMap keeps
    // iteration order deterministic by key.
    pub(crate) map: BTreeMap<u32, (Vec<u8>, T)>,
}
112
113impl<T> Default for FunctionRegistry<T> {
114 fn default() -> Self {
115 Self {
116 map: BTreeMap::new(),
117 }
118 }
119}
120
121impl<T: Copy + PartialEq> FunctionRegistry<T> {
122 pub fn register_function(
124 &mut self,
125 key: u32,
126 name: impl Into<Vec<u8>>,
127 value: T,
128 ) -> Result<(), ElfError> {
129 match self.map.entry(key) {
130 Entry::Vacant(entry) => {
131 entry.insert((name.into(), value));
132 }
133 Entry::Occupied(entry) => {
134 if entry.get().1 != value {
135 return Err(ElfError::SymbolHashCollision(key));
136 }
137 }
138 }
139 Ok(())
140 }
141
142 pub(crate) fn register_function_hashed_legacy<C: ContextObject>(
144 &mut self,
145 loader: &BuiltinProgram<C>,
146 hash_symbol_name: bool,
147 name: impl Into<Vec<u8>>,
148 value: T,
149 ) -> Result<u32, ElfError>
150 where
151 usize: From<T>,
152 {
153 let name = name.into();
154 let config = loader.get_config();
155 let key = if hash_symbol_name {
156 let hash = if name == b"entrypoint" {
157 ebpf::hash_symbol_name(b"entrypoint")
158 } else {
159 ebpf::hash_symbol_name(&usize::from(value).to_le_bytes())
160 };
161 if loader.get_function_registry().lookup_by_key(hash).is_some() {
162 return Err(ElfError::SymbolHashCollision(hash));
163 }
164 hash
165 } else {
166 usize::from(value) as u32
167 };
168 self.register_function(
169 key,
170 if config.enable_symbol_and_section_labels || name == b"entrypoint" {
171 name
172 } else {
173 Vec::default()
174 },
175 value,
176 )?;
177 Ok(key)
178 }
179
180 pub fn unregister_function(&mut self, key: u32) {
182 self.map.remove(&key);
183 }
184
185 pub fn keys(&self) -> impl Iterator<Item = u32> + '_ {
187 self.map.keys().copied()
188 }
189
190 pub fn iter(&self) -> impl Iterator<Item = (u32, (&[u8], T))> + '_ {
192 self.map
193 .iter()
194 .map(|(key, (name, value))| (*key, (name.as_slice(), *value)))
195 }
196
197 pub fn lookup_by_key(&self, key: u32) -> Option<(&[u8], T)> {
199 self.map
201 .get(&key)
202 .map(|(function_name, value)| (function_name.as_slice(), *value))
203 }
204
205 pub fn lookup_by_name(&self, name: &[u8]) -> Option<(&[u8], T)> {
207 self.map
208 .values()
209 .find(|(function_name, _value)| function_name == name)
210 .map(|(function_name, value)| (function_name.as_slice(), *value))
211 }
212
213 pub fn mem_size(&self) -> usize {
215 std::mem::size_of::<Self>().saturating_add(self.map.iter().fold(
216 0,
217 |state: usize, (_, (name, value))| {
218 state.saturating_add(
219 std::mem::size_of_val(value).saturating_add(
220 std::mem::size_of_val(name).saturating_add(name.capacity()),
221 ),
222 )
223 },
224 ))
225 }
226}
227
/// Signature of a builtin function: a raw pointer to the `EbpfVm` plus the
/// five `u64` arguments.
pub type BuiltinFunction<C> = fn(*mut EbpfVm<C>, u64, u64, u64, u64, u64);
230
/// A registry of builtin functions, optionally carrying the VM `Config`
/// when this program acts as a loader (see `new_loader` vs `new_builtin`).
#[derive(Eq)]
pub struct BuiltinProgram<C: ContextObject> {
    // `Some` for loaders / mocks, `None` for plain builtin programs.
    config: Option<Box<Config>>,
    // Functions registered under the hash of their symbol name.
    sparse_registry: FunctionRegistry<BuiltinFunction<C>>,
}
239
240impl<C: ContextObject> PartialEq for BuiltinProgram<C> {
241 fn eq(&self, other: &Self) -> bool {
242 self.config.eq(&other.config) && self.sparse_registry.eq(&other.sparse_registry)
243 }
244}
245
246impl<C: ContextObject> BuiltinProgram<C> {
247 pub fn new_loader(config: Config) -> Self {
249 Self {
250 config: Some(Box::new(config)),
251 sparse_registry: FunctionRegistry::default(),
252 }
253 }
254
255 pub fn new_builtin() -> Self {
257 Self {
258 config: None,
259 sparse_registry: FunctionRegistry::default(),
260 }
261 }
262
263 pub fn new_mock() -> Self {
265 Self {
266 config: Some(Box::default()),
267 sparse_registry: FunctionRegistry::default(),
268 }
269 }
270
271 pub fn get_config(&self) -> &Config {
273 self.config.as_ref().unwrap()
274 }
275
276 pub fn get_function_registry(&self) -> &FunctionRegistry<BuiltinFunction<C>> {
278 &self.sparse_registry
279 }
280
281 pub fn mem_size(&self) -> usize {
283 std::mem::size_of::<Self>()
284 .saturating_add(if self.config.is_some() {
285 std::mem::size_of::<Config>()
286 } else {
287 0
288 })
289 .saturating_add(self.sparse_registry.mem_size())
290 }
291
292 pub fn register_function(
294 &mut self,
295 name: &str,
296 value: BuiltinFunction<C>,
297 ) -> Result<(), ElfError> {
298 let key = ebpf::hash_symbol_name(name.as_bytes());
299 self.sparse_registry
300 .register_function(key, name, value)
301 .map(|_| ())
302 }
303}
304
impl<C: ContextObject> std::fmt::Debug for BuiltinProgram<C> {
    // Prints the registry with the function pointers shown as integers.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        unsafe {
            // SAFETY(review): reinterprets `&FunctionRegistry<BuiltinFunction<C>>`
            // as `&FunctionRegistry<usize>` purely for printing, so no Debug
            // bound is needed on the fn-pointer type. This assumes a `fn`
            // pointer and a `usize` have identical size and layout inside the
            // registry's map — true on mainstream targets, but not a language
            // guarantee; TODO confirm this holds on all supported platforms.
            writeln!(
                f,
                "registry: {:?}",
                std::mem::transmute::<
                    &FunctionRegistry<BuiltinFunction<C>>,
                    &FunctionRegistry<usize>,
                >(&self.sparse_registry),
            )?;
        }
        Ok(())
    }
}
321
/// Declares a builtin function.
///
/// Expands to a unit struct `$name` with two associated functions:
/// - `rust`: the plain Rust implementation supplied in the macro invocation,
///   taking the context object, five `u64` arguments, and the memory mapping.
/// - `vm`: an adapter matching the `BuiltinFunction` signature that recovers
///   the `EbpfVm` from the raw runtime-environment pointer, synchronizes the
///   instruction meter around the call, invokes `rust`, and stores the
///   converted result in `vm.program_result`.
#[macro_export]
macro_rules! declare_builtin_function {
    ($(#[$attr:meta])* $name:ident $(<$($generic_ident:tt : $generic_type:tt),+>)?, fn rust(
        $vm:ident : &mut $ContextObject:ty,
        $arg_a:ident : u64,
        $arg_b:ident : u64,
        $arg_c:ident : u64,
        $arg_d:ident : u64,
        $arg_e:ident : u64,
        $memory_mapping:ident : &mut $MemoryMapping:ty,
    ) -> $Result:ty { $($rust:tt)* }) => {
        $(#[$attr])*
        pub struct $name {}
        impl $name {
            /// The plain Rust implementation supplied by the macro caller.
            pub fn rust $(<$($generic_ident : $generic_type),+>)? (
                $vm: &mut $ContextObject,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
                $memory_mapping: &mut $MemoryMapping,
            ) -> $Result {
                $($rust)*
            }
            /// VM-facing adapter with the `BuiltinFunction` calling convention.
            #[allow(clippy::too_many_arguments)]
            pub fn vm $(<$($generic_ident : $generic_type),+>)? (
                $vm: *mut $crate::vm::EbpfVm<$ContextObject>,
                $arg_a: u64,
                $arg_b: u64,
                $arg_c: u64,
                $arg_d: u64,
                $arg_e: u64,
            ) {
                use $crate::vm::ContextObject;
                // Recover the true `EbpfVm` address: the pointer handed to a
                // builtin is offset by the runtime environment key, so that
                // offset (in u64 units) is undone before casting back.
                let vm = unsafe {
                    &mut *($vm.cast::<u64>().offset(-($crate::vm::get_runtime_environment_key() as isize)).cast::<$crate::vm::EbpfVm<$ContextObject>>())
                };
                let config = vm.loader.get_config();
                if config.enable_instruction_meter {
                    // NOTE(review): presumably charges the instructions executed
                    // since the meter was last synced; assumes
                    // `previous_instruction_meter >= due_insn_count` — confirm
                    // against the interpreter/JIT accounting.
                    vm.context_object_pointer.consume(vm.previous_instruction_meter - vm.due_insn_count);
                }
                let converted_result: $crate::error::ProgramResult = Self::rust $(::<$($generic_ident),+>)?(
                    vm.context_object_pointer, $arg_a, $arg_b, $arg_c, $arg_d, $arg_e, &mut vm.memory_mapping,
                ).map_err(|err| $crate::error::EbpfError::SyscallError(err)).into();
                vm.program_result = converted_result;
                if config.enable_instruction_meter {
                    // Resynchronize the meter: the builtin itself may have
                    // consumed compute units.
                    vm.previous_instruction_meter = vm.context_object_pointer.get_remaining();
                }
            }
        }
    };
}