1use std::{hint::assert_unchecked, mem::MaybeUninit};
9
10use arrayvec::ArrayVec;
11use bitflags::bitflags;
12
13use crate::{
14 bindings::{
15 vlib_add_trace, vlib_buffer_func_main, vlib_buffer_t, vlib_buffer_t__bindgen_ty_1,
16 vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1, CLIB_LOG2_CACHE_LINE_BYTES,
17 VLIB_BUFFER_EXT_HDR_VALID, VLIB_BUFFER_IS_TRACED, VLIB_BUFFER_MIN_CHAIN_SEG_SIZE,
18 VLIB_BUFFER_NEXT_PRESENT, VLIB_BUFFER_PRE_DATA_SIZE, VLIB_BUFFER_TOTAL_LENGTH_VALID,
19 },
20 vlib::{
21 node::{ErrorCounters, Node, NodeRuntimeRef, VectorBufferIndex},
22 MainRef,
23 },
24 vppinfra::likely,
25};
26
27#[cfg(feature = "experimental")]
28use crate::bindings::{vlib_helper_buffer_alloc, vlib_helper_buffer_free};
29#[cfg(feature = "experimental")]
30use std::fmt;
31
/// Strongly-typed wrapper around a raw `u32` VPP buffer index.
///
/// `#[repr(transparent)]` guarantees the same layout as `u32`, which the
/// `VectorBufferIndex` impl below relies on to reinterpret slices.
#[repr(transparent)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BufferIndex(u32);
36
impl BufferIndex {
    /// Wraps a raw buffer index. No validation is performed; the caller is
    /// responsible for the index referring to a live buffer.
    pub const fn new(buffer: u32) -> Self {
        Self(buffer)
    }
}
43
44impl From<u32> for BufferIndex {
45 fn from(value: u32) -> BufferIndex {
46 Self(value)
47 }
48}
49
50impl From<BufferIndex> for u32 {
51 fn from(value: BufferIndex) -> Self {
52 value.0
53 }
54}
55
56impl VectorBufferIndex for BufferIndex {
57 fn as_u32_slice(slice: &[Self]) -> &[u32] {
58 unsafe { std::mem::transmute::<&[BufferIndex], &[u32]>(slice) }
61 }
62}
63
bitflags! {
    /// Flag bits mirrored from the C `vlib_buffer_t` metadata `flags` word.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct BufferFlags: u32 {
        /// Packet is being captured by the trace infrastructure.
        const IS_TRACED = VLIB_BUFFER_IS_TRACED;
        /// A chained next buffer exists (see VLIB_BUFFER_NEXT_PRESENT).
        const NEXT_PRESENT = VLIB_BUFFER_NEXT_PRESENT;
        /// The cached total-length field is valid.
        const TOTAL_LENGTH_VALID = VLIB_BUFFER_TOTAL_LENGTH_VALID;
        /// The external header metadata is valid.
        const EXT_HDR_VALID = VLIB_BUFFER_EXT_HDR_VALID;

        // Keep all other (unnamed/user) bits instead of truncating them,
        // so round-tripping through `from_bits_retain` is lossless.
        const _ = !0;
    }
}
82
/// Returns the bit mask for user flag number `n`.
///
/// User flags are allocated from the top of the 32-bit flags word downward:
/// `n == 1` maps to bit 31, `n == 2` to bit 30, and so on.
///
/// # Panics
/// Panics when `n` is outside `1..=28` (in const contexts this is a
/// compile-time error).
pub const fn vlib_buffer_flag_user(n: u32) -> u32 {
    assert!(n < 29 && n > 0);
    let bit = 32 - n;
    1u32 << bit
}
90
/// Zero-sized opaque handle over a raw C `vlib_buffer_t`.
///
/// References to this type are manufactured from raw pointers (see
/// `from_ptr`/`from_ptr_mut`); the `FeatureData` parameter is a phantom
/// marker only and occupies no storage.
#[repr(transparent)]
pub struct BufferRef<FeatureData>(foreign_types::Opaque, std::marker::PhantomData<FeatureData>);
97
impl<FeatureData> BufferRef<FeatureData> {
    /// Reinterprets a raw buffer pointer as a shared `BufferRef`.
    ///
    /// # Safety
    /// `ptr` must point to a valid, live `vlib_buffer_t` for the (caller
    /// chosen) lifetime `'a`, with no conflicting mutable access.
    #[inline(always)]
    pub unsafe fn from_ptr<'a>(ptr: *mut vlib_buffer_t) -> &'a Self {
        &*(ptr as *mut _)
    }

    /// Reinterprets a raw buffer pointer as an exclusive `BufferRef`.
    ///
    /// # Safety
    /// Same as [`Self::from_ptr`], plus the reference must be unique for `'a`.
    #[inline(always)]
    pub unsafe fn from_ptr_mut<'a>(ptr: *mut vlib_buffer_t) -> &'a mut Self {
        &mut *(ptr as *mut _)
    }

    /// Returns the underlying raw `vlib_buffer_t` pointer.
    // Note: casts away constness; callers must not mutate through the
    // result while shared references exist.
    pub fn as_ptr(&self) -> *mut vlib_buffer_t {
        self as *const _ as *mut _
    }

    // Shared view of the first bindgen anonymous union (buffer details).
    fn as_details(&self) -> &vlib_buffer_t__bindgen_ty_1 {
        unsafe { (*self.as_ptr()).__bindgen_anon_1.as_ref() }
    }

    // Exclusive view of the first bindgen anonymous union.
    fn as_details_mut(&mut self) -> &mut vlib_buffer_t__bindgen_ty_1 {
        unsafe { (*self.as_ptr()).__bindgen_anon_1.as_mut() }
    }

    // Shared view of the innermost metadata struct (current_data,
    // current_length, flags, error, ...), nested two unions deep.
    pub(crate) fn as_metadata(&self) -> &vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1 {
        unsafe { self.as_details().__bindgen_anon_1.__bindgen_anon_1.as_ref() }
    }

    // Exclusive view of the innermost metadata struct.
    pub(crate) fn as_metadata_mut(
        &mut self,
    ) -> &mut vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1 {
        unsafe {
            self.as_details_mut()
                .__bindgen_anon_1
                .__bindgen_anon_1
                .as_mut()
        }
    }

    // Base of the buffer's data area (before applying `current_data`).
    fn data(&self) -> *const u8 {
        self.as_details().data.as_ptr()
    }

    // Signed offset of the current payload start relative to `data()`.
    // Negative values reach into the pre-data (rewrite) area.
    fn current_data_offset(&self) -> i16 {
        self.as_metadata().current_data
    }

    fn current_data_offset_mut(&mut self) -> &mut i16 {
        &mut self.as_metadata_mut().current_data
    }

    /// Length in bytes of the payload in this buffer segment.
    pub fn current_length(&self) -> u16 {
        self.as_metadata().current_length
    }

    fn current_length_mut(&mut self) -> &mut u16 {
        &mut self.as_metadata_mut().current_length
    }

    /// Current flag bits; unknown/user bits are retained, not masked off.
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_retain(self.as_metadata().flags)
    }

    /// Pointer to the first byte of the current payload
    /// (`data + current_data`).
    pub fn current_ptr_mut(&mut self) -> *mut u8 {
        let data = self.data().cast_mut();
        let current_data = self.current_data_offset();

        // current_data may be negative but must stay within the pre-data area.
        debug_assert!(current_data >= -(VLIB_BUFFER_PRE_DATA_SIZE as i16));

        unsafe { data.offset(current_data as isize) }
    }

    /// Returns true when at least `l` bytes of payload remain.
    // NOTE(review): a negative `l` wraps via `as u16` to a large value and
    // therefore always returns false here — confirm callers never pass
    // negative lengths (the analogous C helper compares in a wider signed
    // type).
    pub fn has_space(&self, l: i16) -> bool {
        self.current_length() >= l as u16
    }

    /// Moves the current-data pointer forward (`l > 0`) or backward
    /// (`l < 0`), adjusting `current_length` in the opposite direction.
    ///
    /// # Safety
    /// The caller must ensure the resulting offset/length stay within the
    /// buffer; only `debug_assert`s guard this in debug builds.
    pub unsafe fn advance(&mut self, l: i16) {
        // Advancing must not consume more payload than exists...
        debug_assert!(l < 0 || self.current_length() >= l as u16);
        // ...and rewinding must not escape the pre-data area.
        debug_assert!(
            l >= 0 || self.current_data_offset() + VLIB_BUFFER_PRE_DATA_SIZE as i16 >= -l
        );

        *self.current_data_offset_mut() += l;
        if l >= 0 {
            *self.current_length_mut() -= l as u16;
        } else {
            *self.current_length_mut() += -l as u16;
        }

        // Chained buffers must keep a minimum segment size in each link.
        debug_assert!(
            !self.flags().contains(BufferFlags::NEXT_PRESENT)
                || self.current_length() >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE as u16
        );
    }

    /// Pointer one past the last payload byte
    /// (`data + current_data + current_length`).
    pub fn tail_mut(&mut self) -> *mut u8 {
        let data = self.data().cast_mut();
        let current_data = self.current_data_offset();

        debug_assert!(current_data >= -(VLIB_BUFFER_PRE_DATA_SIZE as i16));

        unsafe {
            let ptr = data.offset(current_data as isize);
            ptr.add(self.current_length() as usize)
        }
    }

    /// Allocates trace record space for this buffer via `vlib_add_trace`
    /// and returns it uninitialized; the caller must fully initialize the
    /// returned `TraceData` before it is read by the trace infrastructure.
    pub fn add_trace<N: Node>(
        &mut self,
        vm: &MainRef,
        node: &NodeRuntimeRef<N>,
    ) -> &mut MaybeUninit<N::TraceData> {
        unsafe {
            &mut *(vlib_add_trace(
                vm.as_ptr(),
                node.as_ptr(),
                self.as_ptr(),
                std::mem::size_of::<N::TraceData>() as u32,
            ) as *mut MaybeUninit<N::TraceData>)
        }
    }

    /// Tags this buffer with one of `node`'s error codes by indexing the
    /// node's per-error counter table with the error's `u16` value.
    pub fn set_error<N: Node>(&mut self, node: &NodeRuntimeRef<N>, error: N::Errors) {
        unsafe {
            // `errors` is the node's error-code array; the error enum's u16
            // value selects the entry whose value is stored in the buffer.
            let error_value = (*node.as_ptr()).errors.add(error.into_u16() as usize);
            self.as_metadata_mut().error = *error_value;
        }
    }
}
300
301#[cfg(feature = "experimental")]
305pub struct BufferWithContext<'a> {
306 buffer: u32,
307 vm: &'a MainRef,
308}
309
310#[cfg(feature = "experimental")]
311impl<'a> BufferWithContext<'a> {
312 pub unsafe fn from_parts(buffer: u32, vm: &'a MainRef) -> Self {
318 Self { buffer, vm }
319 }
320
321 pub fn as_buffer_ref(&mut self) -> &mut BufferRef<()> {
323 let from = &[self.buffer];
324 let mut b: ArrayVec<_, 1> = ArrayVec::new();
325 unsafe {
329 self.vm.get_buffers(from, &mut b);
330 }
331 b.remove(0)
332 }
333}
334
335#[cfg(feature = "experimental")]
336impl Drop for BufferWithContext<'_> {
337 fn drop(&mut self) {
338 unsafe {
342 vlib_helper_buffer_free(self.vm.as_ptr(), &mut self.buffer, 1);
343 }
344 }
345}
346
/// Error returned when the helper buffer allocator fails to provide a buffer.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[cfg(feature = "experimental")]
pub struct BufferAllocError;
351
352#[cfg(feature = "experimental")]
353impl fmt::Display for BufferAllocError {
354 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
355 write!(f, "buffer allocation error")
356 }
357}
358
359#[cfg(feature = "experimental")]
360impl std::error::Error for BufferAllocError {}
361
/// Eight `u64` lanes operated on element-wise; a portable stand-in for a
/// 512-bit SIMD register used by the buffer-pointer translation fast path.
#[allow(non_camel_case_types)]
pub(crate) struct u64x8([u64; 8]);

impl u64x8 {
    /// Builds the lanes directly from an array.
    #[inline(always)]
    pub(crate) fn from_array(a: [u64; 8]) -> Self {
        Self(a)
    }

    /// Loads eight consecutive `u32` values and widens each lane to `u64`.
    ///
    /// # Safety
    /// `ptr` must be valid for reading eight `u32`s.
    #[inline(always)]
    pub(crate) unsafe fn from_u32_ptr(ptr: *const u32) -> Self {
        Self(std::array::from_fn(|lane| {
            u64::from(unsafe { ptr.add(lane).read() })
        }))
    }

    /// Shifts every lane left by `OFFSET` bits, in place.
    #[inline(always)]
    pub(crate) fn shift_elements_left<const OFFSET: u32>(&mut self) {
        self.0.iter_mut().for_each(|lane| *lane <<= OFFSET);
    }

    /// Returns a new vector with `value` added to every lane.
    #[inline(always)]
    pub(crate) fn add_u64(&self, value: u64) -> Self {
        Self(self.0.map(|lane| lane + value))
    }

    /// Writes all eight lanes to consecutive `u64` slots at `ptr`.
    ///
    /// # Safety
    /// `ptr` must be valid for writing eight `u64`s.
    #[inline(always)]
    pub(crate) unsafe fn store(&self, ptr: *mut u64) {
        for (lane, &value) in self.0.iter().enumerate() {
            unsafe { ptr.add(lane).write(value) };
        }
    }
}
430
/// Rounds `val` up to the next multiple of `pow2`, which must be a power of
/// two (checked only in debug builds).
const fn next_multiple_of_pow2(val: usize, pow2: usize) -> usize {
    debug_assert!(pow2.is_power_of_two());
    // Classic bit trick: add the mask, then clear the low bits.
    let mask = pow2 - 1;
    (val + mask) & !mask
}
436
impl MainRef {
    /// Translates buffer indices into `&mut BufferRef`s by computing
    /// `buffer_mem_start + (index << CLIB_LOG2_CACHE_LINE_BYTES)` for each
    /// index, using an unrolled 8-wide path when `N` is a multiple of 8.
    ///
    /// # Safety
    /// - `from_indices.len()` must be at most `N` (enforced only via
    ///   `assert_unchecked` in release builds).
    /// - Every index must refer to valid buffer memory.
    /// - NOTE(review): when `N` is a multiple of 8, the length is rounded up
    ///   to a multiple of 8, so the vector loops may read up to 7 `u32`s
    ///   past the end of `from_indices` and write the corresponding `to`
    ///   slots. This mirrors the padded C fast path, but it requires the
    ///   caller's index storage to be readable through the rounded length —
    ///   confirm all callers provide padded backing (the unit test below
    ///   uses an `ArrayVec<u32, 72>` for 65 indices for this reason).
    #[inline(always)]
    pub unsafe fn get_buffers<'a, 'me, 'buf: 'me, FeatureData, const N: usize>(
        &'me self,
        from_indices: &'a [u32],
        to: &mut ArrayVec<&'buf mut BufferRef<FeatureData>, N>,
    ) {
        debug_assert!(from_indices.len() <= N);
        // Promise the optimizer the capacity bound holds so the
        // `push_unchecked`/`set_len` paths need no checks.
        assert_unchecked(from_indices.len() <= N);

        // Debug-only sanity check that each index lands inside buffer memory.
        // NOTE(review): the shift is done in u32 before widening to u64, so
        // a very large index could wrap before the comparison — confirm the
        // index range, or widen before shifting.
        #[cfg(debug_assertions)]
        for from_index in from_indices {
            let buffer_mem_size = (*(*self.as_ptr()).buffer_main).buffer_mem_size;
            debug_assert!(((*from_index << CLIB_LOG2_CACHE_LINE_BYTES) as u64) < buffer_mem_size);
        }

        let buffer_mem_start = (*(*self.as_ptr()).buffer_main).buffer_mem_start;

        // Scalar fallback: only used when the capacity is not 8-aligned,
        // since the vector path below writes in groups of 8.
        if !N.is_multiple_of(8) {
            let base = buffer_mem_start as *const i8;
            for from_index in from_indices.iter() {
                let ptr = base.add((*from_index << CLIB_LOG2_CACHE_LINE_BYTES) as usize)
                    as *mut vlib_buffer_t;
                to.push_unchecked(BufferRef::from_ptr_mut(ptr));
            }
            return;
        }

        // Round the work size up to a multiple of 8 so every loop below can
        // process full 8-wide groups (see the over-read note above).
        let mut len = from_indices.len();
        len = next_multiple_of_pow2(len, 8);

        let mut from_index = from_indices.as_ptr();
        // `to` slots are written as raw u64 pointer values and later
        // materialized by `set_len`.
        let mut to_ptr = to.as_mut_ptr();

        // 64-at-a-time: load 8 groups of 8 indices, scale to byte offsets,
        // add the base address, and store the resulting pointers.
        while len >= 64 {
            let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
            let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));
            let mut from_index_x8_3 = u64x8::from_u32_ptr(from_index.add(2 * 8));
            let mut from_index_x8_4 = u64x8::from_u32_ptr(from_index.add(3 * 8));
            let mut from_index_x8_5 = u64x8::from_u32_ptr(from_index.add(4 * 8));
            let mut from_index_x8_6 = u64x8::from_u32_ptr(from_index.add(5 * 8));
            let mut from_index_x8_7 = u64x8::from_u32_ptr(from_index.add(6 * 8));
            let mut from_index_x8_8 = u64x8::from_u32_ptr(from_index.add(7 * 8));

            from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_3.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_4.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_5.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_6.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_7.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_8.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

            let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
            let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);
            let buf_ptr_x8_3 = from_index_x8_3.add_u64(buffer_mem_start);
            let buf_ptr_x8_4 = from_index_x8_4.add_u64(buffer_mem_start);
            let buf_ptr_x8_5 = from_index_x8_5.add_u64(buffer_mem_start);
            let buf_ptr_x8_6 = from_index_x8_6.add_u64(buffer_mem_start);
            let buf_ptr_x8_7 = from_index_x8_7.add_u64(buffer_mem_start);
            let buf_ptr_x8_8 = from_index_x8_8.add_u64(buffer_mem_start);

            buf_ptr_x8_1.store(to_ptr as *mut u64);
            buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);
            buf_ptr_x8_3.store(to_ptr.add(2 * 8) as *mut u64);
            buf_ptr_x8_4.store(to_ptr.add(3 * 8) as *mut u64);
            buf_ptr_x8_5.store(to_ptr.add(4 * 8) as *mut u64);
            buf_ptr_x8_6.store(to_ptr.add(5 * 8) as *mut u64);
            buf_ptr_x8_7.store(to_ptr.add(6 * 8) as *mut u64);
            buf_ptr_x8_8.store(to_ptr.add(7 * 8) as *mut u64);

            to_ptr = to_ptr.add(64);
            from_index = from_index.add(64);
            len -= 64;
        }

        // 32-at-a-time tail.
        if likely(len >= 32) {
            let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
            let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));
            let mut from_index_x8_3 = u64x8::from_u32_ptr(from_index.add(2 * 8));
            let mut from_index_x8_4 = u64x8::from_u32_ptr(from_index.add(3 * 8));

            from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_3.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_4.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

            let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
            let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);
            let buf_ptr_x8_3 = from_index_x8_3.add_u64(buffer_mem_start);
            let buf_ptr_x8_4 = from_index_x8_4.add_u64(buffer_mem_start);

            buf_ptr_x8_1.store(to_ptr as *mut u64);
            buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);
            buf_ptr_x8_3.store(to_ptr.add(2 * 8) as *mut u64);
            buf_ptr_x8_4.store(to_ptr.add(3 * 8) as *mut u64);

            to_ptr = to_ptr.add(32);
            from_index = from_index.add(32);
            len -= 32;
        }

        // 16-at-a-time tail.
        if likely(len >= 16) {
            let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
            let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));

            from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

            let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
            let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);

            buf_ptr_x8_1.store(to_ptr as *mut u64);
            buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);

            to_ptr = to_ptr.add(16);
            from_index = from_index.add(16);
            len -= 16;
        }

        // Final 8-wide group (len is 0 or 8 here because of the rounding).
        if likely(len > 0) {
            let mut from_index_x8 = u64x8::from_u32_ptr(from_index);
            from_index_x8.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
            let buf_ptr_x8 = from_index_x8.add_u64(buffer_mem_start);
            buf_ptr_x8.store(to_ptr as *mut u64);
        }

        // Publish only the genuinely requested number of entries; any extra
        // slots written by the rounded loops are ignored.
        to.set_len(from_indices.len());
    }

    /// Hands a batch of buffers with their chosen next-node indices to the
    /// vlib enqueue function (`buffer_enqueue_to_next_fn`).
    ///
    /// # Safety
    /// - `from` and `nexts` must describe valid buffers/next indices for
    ///   `node`; lengths must match (debug-checked only).
    /// - The function pointer is called via `unwrap_unchecked`, so the
    ///   buffer function table must be initialized.
    #[inline(always)]
    pub unsafe fn buffer_enqueue_to_next<N: Node, V: VectorBufferIndex>(
        &self,
        node: &mut NodeRuntimeRef<N>,
        from: &[V],
        nexts: &[u16],
    ) {
        debug_assert_eq!(from.len(), nexts.len());
        unsafe {
            (vlib_buffer_func_main
                .buffer_enqueue_to_next_fn
                .unwrap_unchecked())(
                self.as_ptr(),
                node.as_ptr(),
                VectorBufferIndex::as_u32_slice(from).as_ptr().cast_mut(),
                nexts.as_ptr() as *mut u16,
                from.len() as u64,
            )
        }
    }

    /// Allocates a single buffer via the helper allocator, returning an
    /// owning handle that frees it on drop.
    ///
    /// Errors with [`BufferAllocError`] when the allocator does not report
    /// exactly one allocated buffer.
    #[cfg(feature = "experimental")]
    pub fn alloc_buffer(&self) -> Result<BufferWithContext<'_>, BufferAllocError> {
        unsafe {
            let mut buffer = 0;
            // Returns the number of buffers actually allocated.
            let res = vlib_helper_buffer_alloc(self.as_ptr(), &mut buffer, 1);
            if res == 1 {
                Ok(BufferWithContext::from_parts(buffer, self))
            } else {
                Err(BufferAllocError)
            }
        }
    }
}
650
#[cfg(test)]
mod tests {
    use arrayvec::ArrayVec;

    use crate::{
        bindings::{vlib_buffer_main_t, vlib_buffer_t, vlib_main_t, CLIB_LOG2_CACHE_LINE_BYTES},
        vlib::{node::FRAME_SIZE, MainRef},
    };

    // Exercises the vectorized index->pointer path with 65 buffers so the
    // 64-wide loop, the rounded remainder, and `set_len` are all covered.
    #[test]
    fn get_buffers() {
        let buffer = vlib_buffer_t::default();
        let buffers: [vlib_buffer_t; 65] = [buffer; 65];
        // Capacity 72 (65 rounded up to a multiple of 8) so the fast path's
        // over-read of the index array stays inside owned storage.
        let buffer_indices: ArrayVec<u32, 72> = (0..65)
            .map(|n| {
                n * (std::mem::size_of::<vlib_buffer_t>() as u32 >> CLIB_LOG2_CACHE_LINE_BYTES)
            })
            .collect();
        // Fake buffer memory pointing at the local array.
        let mut buffer_main = vlib_buffer_main_t {
            buffer_mem_start: std::ptr::addr_of!(buffers) as u64,
            buffer_mem_size: std::mem::size_of_val(&buffers) as u64,
            ..vlib_buffer_main_t::default()
        };
        let mut main = vlib_main_t {
            buffer_main: std::ptr::addr_of_mut!(buffer_main),
            ..vlib_main_t::default()
        };
        unsafe {
            let mut to = ArrayVec::new();
            let main_ref = MainRef::from_ptr_mut(std::ptr::addr_of_mut!(main));
            main_ref.get_buffers::<(), FRAME_SIZE>(&buffer_indices, &mut to);
            // Every translated pointer must address the matching element of
            // the local `buffers` array.
            let expected: Vec<&vlib_buffer_t> = buffers.iter().collect();
            assert_eq!(to.len(), expected.len());
            for (i, buf_ref) in to.iter().enumerate() {
                assert!(
                    buf_ref.as_ptr().cast_const() == std::ptr::addr_of!(buffers[i]),
                    "Buffer index {i} pointers don't match: {:p} expected {:p}",
                    buf_ref.as_ptr(),
                    std::ptr::addr_of!(buffers[i])
                );
            }
        }
    }
}