1use core::{cell::UnsafeCell, marker::PhantomData, mem, ptr::NonNull};
2
3use aya_ebpf_bindings::bindings::bpf_map_type::{
4 BPF_MAP_TYPE_LRU_HASH, BPF_MAP_TYPE_LRU_PERCPU_HASH, BPF_MAP_TYPE_PERCPU_HASH,
5};
6use aya_ebpf_cty::{c_long, c_void};
7
8use crate::{
9 bindings::{bpf_map_def, bpf_map_type::BPF_MAP_TYPE_HASH},
10 helpers::{bpf_map_delete_elem, bpf_map_lookup_elem, bpf_map_update_elem},
11 maps::PinningType,
12};
13
/// An eBPF hash map (`BPF_MAP_TYPE_HASH`) with keys of type `K` and values of
/// type `V`.
///
/// The struct is `#[repr(transparent)]` so that, in the compiled object, it is
/// laid out exactly like the `bpf_map_def` the loader expects to find.
#[repr(transparent)]
pub struct HashMap<K, V> {
    // Map definition consumed by the loader. Wrapped in UnsafeCell because the
    // raw *mut bpf_map_def is handed to the bpf helpers via self.def.get() —
    // NOTE(review): presumably also patched by the loader (e.g. `id`); confirm.
    def: UnsafeCell<bpf_map_def>,
    // Zero-sized markers tying the key/value types to the map handle.
    _k: PhantomData<K>,
    _v: PhantomData<V>,
}

// SAFETY: the only state stored inline is the map definition; entries live in
// kernel-managed storage accessed through bpf helpers. `K: Sync` and `V: Sync`
// are required because `get` can hand out shared references to values.
unsafe impl<K: Sync, V: Sync> Sync for HashMap<K, V> {}
22
impl<K, V> HashMap<K, V> {
    /// Creates an unpinned `HashMap` holding at most `max_entries` entries.
    ///
    /// `flags` is stored verbatim in the map definition's `map_flags` field.
    pub const fn with_max_entries(max_entries: u32, flags: u32) -> HashMap<K, V> {
        HashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_HASH,
                max_entries,
                flags,
                PinningType::None,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Creates a `HashMap` pinned by name (`PinningType::ByName`) holding at
    /// most `max_entries` entries.
    pub const fn pinned(max_entries: u32, flags: u32) -> HashMap<K, V> {
        HashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_HASH,
                max_entries,
                flags,
                PinningType::ByName,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Returns a reference to the value associated with `key`, or `None` if
    /// the key is not present.
    ///
    /// # Safety
    ///
    /// The returned reference's lifetime is not tied to `&self` (the helper it
    /// delegates to uses an unconstrained lifetime), so the caller must ensure
    /// the entry is not removed or mutated while the reference is alive.
    #[inline]
    pub unsafe fn get(&self, key: &K) -> Option<&V> {
        get(self.def.get(), key)
    }

    /// Returns a read-only raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr(&self, key: &K) -> Option<*const V> {
        get_ptr(self.def.get(), key)
    }

    /// Returns a mutable raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr_mut(&self, key: &K) -> Option<*mut V> {
        get_ptr_mut(self.def.get(), key)
    }

    /// Inserts (or updates) the entry for `key` with `value`.
    ///
    /// `flags` is forwarded to `bpf_map_update_elem` (e.g. BPF_ANY/BPF_NOEXIST/
    /// BPF_EXIST). On failure returns the helper's negative error code.
    #[inline]
    pub fn insert(&self, key: &K, value: &V, flags: u64) -> Result<(), c_long> {
        insert(self.def.get(), key, value, flags)
    }

    /// Removes the entry for `key`. On failure (including a missing key)
    /// returns the helper's negative error code.
    #[inline]
    pub fn remove(&self, key: &K) -> Result<(), c_long> {
        remove(self.def.get(), key)
    }
}
87
/// An eBPF LRU hash map (`BPF_MAP_TYPE_LRU_HASH`) with keys of type `K` and
/// values of type `V`. When full, the kernel evicts least-recently-used
/// entries to make room — NOTE(review): eviction policy is kernel behavior,
/// not visible here; confirm against the BPF map documentation.
#[repr(transparent)]
pub struct LruHashMap<K, V> {
    // Map definition consumed by the loader; accessed as *mut via UnsafeCell.
    def: UnsafeCell<bpf_map_def>,
    // Zero-sized markers tying the key/value types to the map handle.
    _k: PhantomData<K>,
    _v: PhantomData<V>,
}

// SAFETY: same reasoning as `HashMap` — entries live in kernel storage and
// `get` can hand out shared references, hence the `Sync` bounds on K and V.
unsafe impl<K: Sync, V: Sync> Sync for LruHashMap<K, V> {}
96
impl<K, V> LruHashMap<K, V> {
    /// Creates an unpinned `LruHashMap` holding at most `max_entries` entries.
    ///
    /// `flags` is stored verbatim in the map definition's `map_flags` field.
    pub const fn with_max_entries(max_entries: u32, flags: u32) -> LruHashMap<K, V> {
        LruHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_LRU_HASH,
                max_entries,
                flags,
                PinningType::None,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Creates an `LruHashMap` pinned by name (`PinningType::ByName`) holding
    /// at most `max_entries` entries.
    pub const fn pinned(max_entries: u32, flags: u32) -> LruHashMap<K, V> {
        LruHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_LRU_HASH,
                max_entries,
                flags,
                PinningType::ByName,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Returns a reference to the value associated with `key`, or `None` if
    /// the key is not present.
    ///
    /// # Safety
    ///
    /// The returned reference's lifetime is not tied to `&self`; the caller
    /// must ensure the entry is not removed or mutated (including by LRU
    /// eviction) while the reference is alive.
    #[inline]
    pub unsafe fn get(&self, key: &K) -> Option<&V> {
        get(self.def.get(), key)
    }

    /// Returns a read-only raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr(&self, key: &K) -> Option<*const V> {
        get_ptr(self.def.get(), key)
    }

    /// Returns a mutable raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr_mut(&self, key: &K) -> Option<*mut V> {
        get_ptr_mut(self.def.get(), key)
    }

    /// Inserts (or updates) the entry for `key` with `value`; `flags` is
    /// forwarded to `bpf_map_update_elem`. On failure returns the helper's
    /// negative error code.
    #[inline]
    pub fn insert(&self, key: &K, value: &V, flags: u64) -> Result<(), c_long> {
        insert(self.def.get(), key, value, flags)
    }

    /// Removes the entry for `key`. On failure (including a missing key)
    /// returns the helper's negative error code.
    #[inline]
    pub fn remove(&self, key: &K) -> Result<(), c_long> {
        remove(self.def.get(), key)
    }
}
161
/// An eBPF per-CPU hash map (`BPF_MAP_TYPE_PERCPU_HASH`) with keys of type `K`
/// and values of type `V`. Each CPU holds its own copy of each value —
/// NOTE(review): per-CPU semantics are kernel behavior, not visible here;
/// confirm against the BPF map documentation.
#[repr(transparent)]
pub struct PerCpuHashMap<K, V> {
    // Map definition consumed by the loader; accessed as *mut via UnsafeCell.
    def: UnsafeCell<bpf_map_def>,
    // Zero-sized markers tying the key/value types to the map handle.
    _k: PhantomData<K>,
    _v: PhantomData<V>,
}

// SAFETY: unlike `HashMap`, no `Sync` bounds are placed on K/V here —
// NOTE(review): presumably because each CPU only touches its own copy, so
// cross-thread sharing of a single value never occurs; confirm this intent.
unsafe impl<K, V> Sync for PerCpuHashMap<K, V> {}
170
impl<K, V> PerCpuHashMap<K, V> {
    /// Creates an unpinned `PerCpuHashMap` holding at most `max_entries`
    /// entries.
    ///
    /// `flags` is stored verbatim in the map definition's `map_flags` field.
    pub const fn with_max_entries(max_entries: u32, flags: u32) -> PerCpuHashMap<K, V> {
        PerCpuHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_PERCPU_HASH,
                max_entries,
                flags,
                PinningType::None,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Creates a `PerCpuHashMap` pinned by name (`PinningType::ByName`)
    /// holding at most `max_entries` entries.
    pub const fn pinned(max_entries: u32, flags: u32) -> PerCpuHashMap<K, V> {
        PerCpuHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_PERCPU_HASH,
                max_entries,
                flags,
                PinningType::ByName,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Returns a reference to the value associated with `key` (presumably the
    /// current CPU's copy — confirm against kernel docs), or `None` if absent.
    ///
    /// # Safety
    ///
    /// The returned reference's lifetime is not tied to `&self`; the caller
    /// must ensure the entry is not removed or mutated while it is alive.
    #[inline]
    pub unsafe fn get(&self, key: &K) -> Option<&V> {
        get(self.def.get(), key)
    }

    /// Returns a read-only raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr(&self, key: &K) -> Option<*const V> {
        get_ptr(self.def.get(), key)
    }

    /// Returns a mutable raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr_mut(&self, key: &K) -> Option<*mut V> {
        get_ptr_mut(self.def.get(), key)
    }

    /// Inserts (or updates) the entry for `key` with `value`; `flags` is
    /// forwarded to `bpf_map_update_elem`. On failure returns the helper's
    /// negative error code.
    #[inline]
    pub fn insert(&self, key: &K, value: &V, flags: u64) -> Result<(), c_long> {
        insert(self.def.get(), key, value, flags)
    }

    /// Removes the entry for `key`. On failure (including a missing key)
    /// returns the helper's negative error code.
    #[inline]
    pub fn remove(&self, key: &K) -> Result<(), c_long> {
        remove(self.def.get(), key)
    }
}
235
/// An eBPF per-CPU LRU hash map (`BPF_MAP_TYPE_LRU_PERCPU_HASH`) with keys of
/// type `K` and values of type `V` — combining per-CPU value storage with LRU
/// eviction. NOTE(review): both behaviors are kernel-side, not visible here;
/// confirm against the BPF map documentation.
#[repr(transparent)]
pub struct LruPerCpuHashMap<K, V> {
    // Map definition consumed by the loader; accessed as *mut via UnsafeCell.
    def: UnsafeCell<bpf_map_def>,
    // Zero-sized markers tying the key/value types to the map handle.
    _k: PhantomData<K>,
    _v: PhantomData<V>,
}

// SAFETY: like `PerCpuHashMap`, no `Sync` bounds on K/V — NOTE(review):
// presumably safe because each CPU only touches its own copy; confirm intent.
unsafe impl<K, V> Sync for LruPerCpuHashMap<K, V> {}
244
impl<K, V> LruPerCpuHashMap<K, V> {
    /// Creates an unpinned `LruPerCpuHashMap` holding at most `max_entries`
    /// entries.
    ///
    /// `flags` is stored verbatim in the map definition's `map_flags` field.
    pub const fn with_max_entries(max_entries: u32, flags: u32) -> LruPerCpuHashMap<K, V> {
        LruPerCpuHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_LRU_PERCPU_HASH,
                max_entries,
                flags,
                PinningType::None,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Creates an `LruPerCpuHashMap` pinned by name (`PinningType::ByName`)
    /// holding at most `max_entries` entries.
    pub const fn pinned(max_entries: u32, flags: u32) -> LruPerCpuHashMap<K, V> {
        LruPerCpuHashMap {
            def: UnsafeCell::new(build_def::<K, V>(
                BPF_MAP_TYPE_LRU_PERCPU_HASH,
                max_entries,
                flags,
                PinningType::ByName,
            )),
            _k: PhantomData,
            _v: PhantomData,
        }
    }

    /// Returns a reference to the value associated with `key` (presumably the
    /// current CPU's copy — confirm against kernel docs), or `None` if absent.
    ///
    /// # Safety
    ///
    /// The returned reference's lifetime is not tied to `&self`; the caller
    /// must ensure the entry is not removed or mutated (including by LRU
    /// eviction) while it is alive.
    #[inline]
    pub unsafe fn get(&self, key: &K) -> Option<&V> {
        get(self.def.get(), key)
    }

    /// Returns a read-only raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr(&self, key: &K) -> Option<*const V> {
        get_ptr(self.def.get(), key)
    }

    /// Returns a mutable raw pointer to the value associated with `key`, or
    /// `None` if the key is not present. Dereferencing requires `unsafe`.
    #[inline]
    pub fn get_ptr_mut(&self, key: &K) -> Option<*mut V> {
        get_ptr_mut(self.def.get(), key)
    }

    /// Inserts (or updates) the entry for `key` with `value`; `flags` is
    /// forwarded to `bpf_map_update_elem`. On failure returns the helper's
    /// negative error code.
    #[inline]
    pub fn insert(&self, key: &K, value: &V, flags: u64) -> Result<(), c_long> {
        insert(self.def.get(), key, value, flags)
    }

    /// Removes the entry for `key`. On failure (including a missing key)
    /// returns the helper's negative error code.
    #[inline]
    pub fn remove(&self, key: &K) -> Result<(), c_long> {
        remove(self.def.get(), key)
    }
}
309
/// Builds a `bpf_map_def` for a map of type `ty` with room for `max_entries`
/// entries. Key and value sizes are derived from the Rust types `K` and `V`.
///
/// `id` is left as 0 (assigned later, presumably by the loader/kernel — TODO
/// confirm) and `pin` is stored as its numeric discriminant.
const fn build_def<K, V>(ty: u32, max_entries: u32, flags: u32, pin: PinningType) -> bpf_map_def {
    bpf_map_def {
        type_: ty,
        key_size: mem::size_of::<K>() as u32,
        value_size: mem::size_of::<V>() as u32,
        max_entries,
        map_flags: flags,
        id: 0,
        pinning: pin as u32,
    }
}
321
322#[inline]
323fn get_ptr_mut<K, V>(def: *mut bpf_map_def, key: &K) -> Option<*mut V> {
324 unsafe {
325 let value = bpf_map_lookup_elem(def as *mut _, key as *const _ as *const c_void);
326 NonNull::new(value as *mut V).map(|p| p.as_ptr())
328 }
329}
330
331#[inline]
332fn get_ptr<K, V>(def: *mut bpf_map_def, key: &K) -> Option<*const V> {
333 get_ptr_mut(def, key).map(|p| p as *const V)
334}
335
336#[inline]
337unsafe fn get<'a, K, V>(def: *mut bpf_map_def, key: &K) -> Option<&'a V> {
338 get_ptr(def, key).map(|p| &*p)
339}
340
341#[inline]
342fn insert<K, V>(def: *mut bpf_map_def, key: &K, value: &V, flags: u64) -> Result<(), c_long> {
343 let ret = unsafe {
344 bpf_map_update_elem(
345 def as *mut _,
346 key as *const _ as *const _,
347 value as *const _ as *const _,
348 flags,
349 )
350 };
351 (ret == 0).then_some(()).ok_or(ret)
352}
353
354#[inline]
355fn remove<K>(def: *mut bpf_map_def, key: &K) -> Result<(), c_long> {
356 let ret = unsafe { bpf_map_delete_elem(def as *mut _, key as *const _ as *const c_void) };
357 (ret == 0).then_some(()).ok_or(ret)
358}