1use std::{hint::assert_unchecked, mem::MaybeUninit};
9
10use arrayvec::ArrayVec;
11use bitflags::bitflags;
12
13use crate::{
14 bindings::{
15 CLIB_LOG2_CACHE_LINE_BYTES, VLIB_BUFFER_EXT_HDR_VALID, VLIB_BUFFER_IS_TRACED,
16 VLIB_BUFFER_MIN_CHAIN_SEG_SIZE, VLIB_BUFFER_NEXT_PRESENT, VLIB_BUFFER_PRE_DATA_SIZE,
17 VLIB_BUFFER_TOTAL_LENGTH_VALID, vlib_add_trace, vlib_buffer_func_main, vlib_buffer_t,
18 vlib_buffer_t__bindgen_ty_1, vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1,
19 },
20 vlib::{
21 self, MainRef,
22 node::{ErrorCounters, Node, NodeRuntimeRef, VectorBufferIndex},
23 },
24 vppinfra::{
25 cache::{prefetch_load, prefetch_store},
26 likely,
27 },
28};
29
30#[cfg(feature = "experimental")]
31use crate::bindings::{vlib_helper_buffer_alloc, vlib_helper_buffer_free};
32#[cfg(feature = "experimental")]
33use std::fmt;
34
/// Index of a `vlib_buffer_t` inside the VLIB buffer memory pool.
///
/// A zero-cost newtype over the raw `u32` index that VPP APIs traffic in;
/// `#[repr(transparent)]` guarantees it has exactly the layout of a `u32`.
#[repr(transparent)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BufferIndex(u32);

impl BufferIndex {
    /// Wraps a raw `u32` buffer index in the typed newtype.
    pub const fn new(buffer: u32) -> Self {
        BufferIndex(buffer)
    }
}

impl From<u32> for BufferIndex {
    /// Converts a raw `u32` index into a typed `BufferIndex`.
    fn from(value: u32) -> BufferIndex {
        BufferIndex::new(value)
    }
}

impl From<BufferIndex> for u32 {
    /// Extracts the raw `u32` index back out of the newtype.
    fn from(value: BufferIndex) -> Self {
        let BufferIndex(raw) = value;
        raw
    }
}
58
59impl VectorBufferIndex for BufferIndex {
60 fn as_u32_slice(slice: &[Self]) -> &[u32] {
61 unsafe { std::mem::transmute::<&[BufferIndex], &[u32]>(slice) }
64 }
65}
66
bitflags! {
    /// Flag bits stored in the `flags` word of a `vlib_buffer_t`.
    ///
    /// Mirrors the `VLIB_BUFFER_*` flag constants from the generated C
    /// bindings.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct BufferFlags: u32 {
        /// `VLIB_BUFFER_IS_TRACED` — the buffer participates in packet tracing
        /// (checked before calling `add_trace`).
        const IS_TRACED = VLIB_BUFFER_IS_TRACED;
        /// `VLIB_BUFFER_NEXT_PRESENT` — the buffer chain continues; when set,
        /// `current_length` alone does not describe the whole packet (see
        /// `length_in_chain`).
        const NEXT_PRESENT = VLIB_BUFFER_NEXT_PRESENT;
        /// `VLIB_BUFFER_TOTAL_LENGTH_VALID` — the
        /// `total_length_not_including_first_buffer` field may be trusted.
        const TOTAL_LENGTH_VALID = VLIB_BUFFER_TOTAL_LENGTH_VALID;
        /// `VLIB_BUFFER_EXT_HDR_VALID` — external header metadata is valid.
        const EXT_HDR_VALID = VLIB_BUFFER_EXT_HDR_VALID;

        /// Treat every other bit as valid too, so user/feature flags set by
        /// other VPP code survive `from_bits_retain` round-trips untruncated.
        const _ = !0;
    }
}
85
/// Computes the mask for user buffer flag `n`, matching the C macro
/// `VLIB_BUFFER_FLAG_USER(n)`: user flag 1 is the most significant bit
/// (bit 31), flag 2 is bit 30, and so on downward.
///
/// # Panics
///
/// Panics unless `0 < n < 29` (at compile time when evaluated in const
/// context). NOTE(review): the `n < 29` upper bound presumably keeps user
/// flags clear of the low bits used by VLIB's own flags — confirm against
/// the C definition.
pub const fn vlib_buffer_flag_user(n: u32) -> u32 {
    assert!(n < 29 && n > 0);
    1u32 << (32 - n)
}
93
/// Borrowed view of a VPP `vlib_buffer_t`.
///
/// Values of this type are only ever handed out by reference (see
/// `from_ptr` / `from_ptr_mut`), never constructed or moved by value; the
/// `foreign_types::Opaque` payload plus `#[repr(transparent)]` makes the
/// reference a plain pointer into VPP-owned memory. `FeatureData` is a
/// zero-sized type-level tag (held only via `PhantomData`) — presumably it
/// selects the per-feature opaque-data interpretation; confirm with the
/// crate's feature-data docs.
#[repr(transparent)]
pub struct BufferRef<FeatureData>(foreign_types::Opaque, std::marker::PhantomData<FeatureData>);
100
impl<FeatureData> BufferRef<FeatureData> {
    /// Reborrows a raw `vlib_buffer_t` pointer as a shared `BufferRef`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, aligned, and point to a live `vlib_buffer_t`
    /// that is not mutated for the caller-chosen lifetime `'a`.
    #[inline(always)]
    pub unsafe fn from_ptr<'a>(ptr: *mut vlib_buffer_t) -> &'a Self {
        // SAFETY: upheld by the caller; `BufferRef` is repr(transparent),
        // so the pointer cast is a pure reinterpretation.
        unsafe { &*(ptr as *mut _) }
    }

    /// Reborrows a raw `vlib_buffer_t` pointer as an exclusive `BufferRef`.
    ///
    /// # Safety
    ///
    /// Same as `from_ptr`, and additionally no other reference to the buffer
    /// may exist for the duration of `'a`.
    #[inline(always)]
    pub unsafe fn from_ptr_mut<'a>(ptr: *mut vlib_buffer_t) -> &'a mut Self {
        // SAFETY: upheld by the caller; see `from_ptr`.
        unsafe { &mut *(ptr as *mut _) }
    }

    /// Returns the underlying raw `vlib_buffer_t` pointer for FFI calls.
    ///
    /// Takes `&self` but returns `*mut` because the C APIs are uniformly
    /// declared with mutable pointers.
    pub fn as_ptr(&self) -> *mut vlib_buffer_t {
        self as *const _ as *mut _
    }

    /// Accesses the first bindgen anonymous union of `vlib_buffer_t`
    /// (the "details" view containing `data`, total-length field, etc.).
    fn as_details(&self) -> &vlib_buffer_t__bindgen_ty_1 {
        unsafe { (*self.as_ptr()).__bindgen_anon_1.as_ref() }
    }

    /// Mutable variant of `as_details`.
    fn as_details_mut(&mut self) -> &mut vlib_buffer_t__bindgen_ty_1 {
        unsafe { (*self.as_ptr()).__bindgen_anon_1.as_mut() }
    }

    /// Accesses the nested anonymous struct holding the per-buffer metadata
    /// (`current_data`, `current_length`, `flags`, `error`, ...).
    pub(crate) fn as_metadata(&self) -> &vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1 {
        unsafe { self.as_details().__bindgen_anon_1.__bindgen_anon_1.as_ref() }
    }

    /// Mutable variant of `as_metadata`.
    pub(crate) fn as_metadata_mut(
        &mut self,
    ) -> &mut vlib_buffer_t__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1 {
        unsafe {
            self.as_details_mut()
                .__bindgen_anon_1
                .__bindgen_anon_1
                .as_mut()
        }
    }

    /// Pointer to the start of the buffer's data array (NOT offset by
    /// `current_data`; use `current_ptr_mut` for the payload start).
    fn data(&self) -> *const u8 {
        self.as_details().data.as_ptr()
    }

    /// Signed offset of the payload relative to `data()`. Negative values
    /// reach into the pre-data (rewrite) area.
    fn current_data_offset(&self) -> i16 {
        self.as_metadata().current_data
    }

    /// Mutable access to the `current_data` offset.
    fn current_data_offset_mut(&mut self) -> &mut i16 {
        &mut self.as_metadata_mut().current_data
    }

    /// Number of payload bytes in THIS buffer segment only (for chained
    /// buffers see `length_in_chain`).
    pub fn current_length(&self) -> u16 {
        self.as_metadata().current_length
    }

    /// Mutable access to `current_length`.
    fn current_length_mut(&mut self) -> &mut u16 {
        &mut self.as_metadata_mut().current_length
    }

    /// Returns the buffer's flag word as typed `BufferFlags`.
    /// `from_bits_retain` keeps user/feature bits beyond the named flags.
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_retain(self.as_metadata().flags)
    }

    /// Pointer to the first byte of the current payload
    /// (`data + current_data`).
    pub fn current_ptr_mut(&mut self) -> *mut u8 {
        let data = self.data().cast_mut();
        let current_data = self.current_data_offset();

        // current_data may be negative, but never before the pre-data area.
        debug_assert!(current_data >= -(VLIB_BUFFER_PRE_DATA_SIZE as i16));

        // SAFETY: the offset stays inside the buffer's pre-data + data
        // region per the invariant asserted above.
        unsafe { data.offset(current_data as isize) }
    }

    /// Returns true when this segment holds at least `l` payload bytes.
    ///
    /// NOTE(review): `l` is cast to `u16`, so a negative `l` wraps to a
    /// large value and the result is almost always `false` — callers
    /// presumably only pass non-negative lengths; confirm.
    pub fn has_space(&self, l: i16) -> bool {
        self.current_length() >= l as u16
    }

    /// Moves the payload start forward (`l > 0`, consuming header bytes) or
    /// backward (`l < 0`, exposing pre-data bytes), adjusting
    /// `current_length` so the payload end stays fixed.
    ///
    /// # Safety
    ///
    /// The caller must ensure the move stays within the segment: at most
    /// `current_length()` bytes forward, at most
    /// `current_data + VLIB_BUFFER_PRE_DATA_SIZE` bytes backward.
    pub unsafe fn advance(&mut self, l: i16) {
        // Forward moves must not consume more than the segment holds...
        debug_assert!(l < 0 || self.current_length() >= l as u16);
        // ...and backward moves must not run past the pre-data area.
        debug_assert!(
            l >= 0 || self.current_data_offset() + VLIB_BUFFER_PRE_DATA_SIZE as i16 >= -l
        );

        *self.current_data_offset_mut() += l;
        if l >= 0 {
            *self.current_length_mut() -= l as u16;
        } else {
            *self.current_length_mut() += -l as u16;
        }

        // Chained segments must keep a minimum size; mirrors the check in
        // VPP's C `vlib_buffer_advance`.
        debug_assert!(
            !self.flags().contains(BufferFlags::NEXT_PRESENT)
                || self.current_length() >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE as u16
        );
    }

    /// Pointer to one-past-the-end of the current payload
    /// (`data + current_data + current_length`).
    pub fn tail_mut(&mut self) -> *mut u8 {
        let data = self.data().cast_mut();
        let current_data = self.current_data_offset();

        debug_assert!(current_data >= -(VLIB_BUFFER_PRE_DATA_SIZE as i16));

        // SAFETY: payload start and length are buffer invariants, so the
        // computed tail stays within the buffer's data region.
        unsafe {
            let ptr = data.offset(current_data as isize);
            ptr.add(self.current_length() as usize)
        }
    }

    /// Allocates a trace record of `N::TraceData`'s size for this buffer via
    /// `vlib_add_trace` and returns it uninitialized for the caller to fill.
    pub fn add_trace<N: Node>(
        &mut self,
        vm: &MainRef,
        node: &NodeRuntimeRef<N>,
    ) -> &mut MaybeUninit<N::TraceData> {
        // SAFETY: vlib_add_trace returns a pointer to at least
        // `size_of::<N::TraceData>()` bytes of trace storage; wrapping it in
        // MaybeUninit defers initialization to the caller.
        unsafe {
            &mut *(vlib_add_trace(
                vm.as_ptr(),
                node.as_ptr(),
                self.as_ptr(),
                std::mem::size_of::<N::TraceData>() as u32,
            ) as *mut MaybeUninit<N::TraceData>)
        }
    }

    /// Stamps this buffer's `error` metadata with the node-relative error
    /// counter value looked up from the node runtime's `errors` array.
    pub fn set_error<N: Node>(&mut self, node: &NodeRuntimeRef<N>, error: N::Errors) {
        // SAFETY: `error.into_u16()` indexes the node's errors array, which
        // VPP sizes to the node's declared error count — presumably
        // guaranteed by the ErrorCounters derivation; confirm.
        unsafe {
            let error_value = (*node.as_ptr()).errors.add(error.into_u16() as usize);
            self.as_metadata_mut().error = *error_value;
        }
    }

    /// Bytes in the rest of the chain, excluding this first segment.
    /// Only meaningful when `TOTAL_LENGTH_VALID` is set (debug-asserted).
    #[inline(always)]
    pub fn total_length_not_including_first_buffer(&self) -> u32 {
        debug_assert!(self.flags().contains(BufferFlags::TOTAL_LENGTH_VALID));
        self.as_details().total_length_not_including_first_buffer
    }

    /// Total payload length across the whole buffer chain.
    ///
    /// Fast paths: unchained buffers return `current_length`; chained
    /// buffers with a valid cached total add the cached remainder. Otherwise
    /// falls back to the C slow path that walks the chain.
    #[inline(always)]
    pub fn length_in_chain(&self, vm: &vlib::MainRef) -> u64 {
        let len = self.current_length();

        if likely(!self.flags().contains(BufferFlags::NEXT_PRESENT)) {
            return len as u64;
        }

        if likely(self.flags().contains(BufferFlags::TOTAL_LENGTH_VALID)) {
            return len as u64 + self.total_length_not_including_first_buffer() as u64;
        }

        // SAFETY: both pointers are valid FFI handles owned by VPP.
        unsafe {
            crate::bindings::vlib_buffer_length_in_chain_slow_path(vm.as_ptr(), self.as_ptr())
        }
    }

    /// Prefetches the buffer header (the `vlib_buffer_t` itself) for reading.
    pub fn prefetch_header_load(&self) {
        prefetch_load(self.as_ptr());
    }

    /// Prefetches the buffer header for writing.
    pub fn prefetch_header_store(&self) {
        prefetch_store(self.as_ptr());
    }

    /// Prefetches the start of the buffer's data array for reading.
    /// Note: targets `data`, not `data + current_data`.
    pub fn prefetch_data_load(&self) {
        prefetch_load(&self.as_details().data);
    }

    /// Prefetches the start of the buffer's data array for writing.
    /// Note: targets `data`, not `data + current_data`.
    pub fn prefetch_data_store(&self) {
        prefetch_store(&self.as_details().data);
    }
}
371
/// Owned handle to a single VLIB buffer that is freed automatically on drop.
///
/// Experimental-only convenience; created by `MainRef::alloc_buffer`.
#[cfg(feature = "experimental")]
pub struct BufferWithContext<'a> {
    // Raw buffer index this handle owns.
    buffer: u32,
    // Main instance the buffer came from; used to free it on drop.
    vm: &'a MainRef,
}
380
#[cfg(feature = "experimental")]
impl<'a> BufferWithContext<'a> {
    /// Wraps an already-allocated buffer index together with its owning main.
    ///
    /// # Safety
    ///
    /// `buffer` must be a valid, live buffer index belonging to `vm`. The
    /// returned handle takes ownership and frees the buffer when dropped, so
    /// the index must not be freed elsewhere.
    pub unsafe fn from_parts(buffer: u32, vm: &'a MainRef) -> Self {
        Self { buffer, vm }
    }

    /// Resolves the stored index to a live `BufferRef` by translating it
    /// through `MainRef::get_buffers` (via a one-element scratch vector).
    pub fn as_buffer_ref(&mut self) -> &mut BufferRef<()> {
        let from = &[self.buffer];
        let mut b: ArrayVec<_, 1> = ArrayVec::new();
        // SAFETY: exactly one index is passed (within the capacity of 1) and
        // it was valid when this handle was constructed.
        unsafe {
            self.vm.get_buffers(from, &mut b);
        }
        b.remove(0)
    }
}
405
#[cfg(feature = "experimental")]
impl Drop for BufferWithContext<'_> {
    /// Returns the single owned buffer to VLIB's free list.
    fn drop(&mut self) {
        // SAFETY: `self.buffer` is the valid index this handle owns
        // (invariant of `from_parts`), and it is freed exactly once here.
        unsafe {
            vlib_helper_buffer_free(self.vm.as_ptr(), &mut self.buffer, 1);
        }
    }
}
417
/// Error returned by `MainRef::alloc_buffer` when VLIB cannot supply a
/// buffer.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[cfg(feature = "experimental")]
pub struct BufferAllocError;

#[cfg(feature = "experimental")]
impl fmt::Display for BufferAllocError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("buffer allocation error")
    }
}

#[cfg(feature = "experimental")]
impl std::error::Error for BufferAllocError {}
432
/// Eight `u64` lanes manipulated together — a tiny portable stand-in for a
/// 512-bit SIMD register, used to translate buffer indices to pointers in
/// batches of eight.
#[allow(non_camel_case_types)]
pub(crate) struct u64x8([u64; 8]);

impl u64x8 {
    /// Builds a vector directly from eight lanes.
    #[inline(always)]
    pub(crate) fn from_array(a: [u64; 8]) -> Self {
        Self(a)
    }

    /// Loads eight consecutive `u32` values and zero-extends each to `u64`.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for reads of eight consecutive `u32`s.
    #[inline(always)]
    pub(crate) unsafe fn from_u32_ptr(ptr: *const u32) -> Self {
        Self(std::array::from_fn(|lane| {
            // SAFETY: the caller guarantees eight readable u32s at `ptr`.
            unsafe { *ptr.add(lane) as u64 }
        }))
    }

    /// Shifts every lane left by `OFFSET` bits, in place.
    #[inline(always)]
    pub(crate) fn shift_elements_left<const OFFSET: u32>(&mut self) {
        for lane in self.0.iter_mut() {
            *lane <<= OFFSET;
        }
    }

    /// Returns a new vector with `value` added to every lane.
    #[inline(always)]
    pub(crate) fn add_u64(&self, value: u64) -> Self {
        Self(self.0.map(|lane| lane + value))
    }

    /// Writes the eight lanes to eight consecutive `u64` slots.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for writes of eight consecutive `u64`s.
    #[inline(always)]
    pub(crate) unsafe fn store(&self, ptr: *mut u64) {
        for (lane, &value) in self.0.iter().enumerate() {
            // SAFETY: the caller guarantees eight writable u64s at `ptr`.
            unsafe { *ptr.add(lane) = value };
        }
    }
}
507
/// Rounds `val` up to the nearest multiple of `pow2` using the usual
/// mask trick; `pow2` must be a power of two (checked in debug builds only).
const fn next_multiple_of_pow2(val: usize, pow2: usize) -> usize {
    debug_assert!(pow2.is_power_of_two());
    let mask = pow2 - 1;
    (val + mask) & !mask
}
513
impl MainRef {
    /// Translates buffer indices to `BufferRef`s in bulk, mirroring VPP's C
    /// `vlib_get_buffers`: each pointer is
    /// `buffer_mem_start + (index << CLIB_LOG2_CACHE_LINE_BYTES)`.
    ///
    /// When `N` is a multiple of 8, pointers are computed eight at a time
    /// via `u64x8` and written directly into `to`'s backing storage (a
    /// `&mut BufferRef` is pointer-sized, so each slot takes one `u64`);
    /// otherwise a simple scalar loop is used.
    ///
    /// NOTE(review): in the 8-wide path, `from_indices` is read in full
    /// groups of eight, so when `from_indices.len()` is not a multiple of 8
    /// this reads up to seven `u32`s past the slice's end — presumably
    /// relying on VPP frame vectors being padded; confirm that all callers
    /// guarantee this.
    ///
    /// # Safety
    ///
    /// Every index must refer to a valid buffer in this main's buffer
    /// memory, `from_indices.len()` must be at most `N`, and the returned
    /// mutable references must not alias other live references to the same
    /// buffers.
    #[inline(always)]
    pub unsafe fn get_buffers<'a, 'me, 'buf: 'me, FeatureData, const N: usize>(
        &'me self,
        from_indices: &'a [u32],
        to: &mut ArrayVec<&'buf mut BufferRef<FeatureData>, N>,
    ) {
        unsafe {
            // Caller contract: never more indices than output capacity.
            // assert_unchecked lets the optimizer drop capacity checks.
            debug_assert!(from_indices.len() <= N);
            assert_unchecked(from_indices.len() <= N);

            // Debug-only sanity pass: every translated offset must land
            // inside the buffer memory region.
            #[cfg(debug_assertions)]
            for from_index in from_indices {
                let buffer_mem_size = (*(*self.as_ptr()).buffer_main).buffer_mem_size;
                debug_assert!(
                    ((*from_index << CLIB_LOG2_CACHE_LINE_BYTES) as u64) < buffer_mem_size
                );
            }

            let buffer_mem_start = (*(*self.as_ptr()).buffer_main).buffer_mem_start;

            // Scalar fallback: `N` not divisible by 8 means the batched
            // writes below could overrun `to`'s storage, so translate one
            // index at a time instead.
            if !N.is_multiple_of(8) {
                let base = buffer_mem_start as *const i8;
                for from_index in from_indices.iter() {
                    let ptr = base.add((*from_index << CLIB_LOG2_CACHE_LINE_BYTES) as usize)
                        as *mut vlib_buffer_t;
                    to.push_unchecked(BufferRef::from_ptr_mut(ptr));
                }
                return;
            }

            // Round the work count up to a multiple of 8; the extra slots
            // written beyond len() stay within capacity N and are excluded
            // by the final set_len.
            let mut len = from_indices.len();
            len = next_multiple_of_pow2(len, 8);

            let mut from_index = from_indices.as_ptr();
            let mut to_ptr = to.as_mut_ptr();

            // Main unrolled loop: 64 indices per iteration (8 x u64x8).
            while len >= 64 {
                let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
                let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));
                let mut from_index_x8_3 = u64x8::from_u32_ptr(from_index.add(2 * 8));
                let mut from_index_x8_4 = u64x8::from_u32_ptr(from_index.add(3 * 8));
                let mut from_index_x8_5 = u64x8::from_u32_ptr(from_index.add(4 * 8));
                let mut from_index_x8_6 = u64x8::from_u32_ptr(from_index.add(5 * 8));
                let mut from_index_x8_7 = u64x8::from_u32_ptr(from_index.add(6 * 8));
                let mut from_index_x8_8 = u64x8::from_u32_ptr(from_index.add(7 * 8));

                // index -> byte offset: multiply by the cache-line size.
                from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_3.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_4.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_5.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_6.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_7.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_8.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

                // byte offset -> absolute pointer: add the region base.
                let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
                let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);
                let buf_ptr_x8_3 = from_index_x8_3.add_u64(buffer_mem_start);
                let buf_ptr_x8_4 = from_index_x8_4.add_u64(buffer_mem_start);
                let buf_ptr_x8_5 = from_index_x8_5.add_u64(buffer_mem_start);
                let buf_ptr_x8_6 = from_index_x8_6.add_u64(buffer_mem_start);
                let buf_ptr_x8_7 = from_index_x8_7.add_u64(buffer_mem_start);
                let buf_ptr_x8_8 = from_index_x8_8.add_u64(buffer_mem_start);

                // Write the pointers straight into the ArrayVec's storage;
                // each &mut BufferRef slot is one u64 wide.
                buf_ptr_x8_1.store(to_ptr as *mut u64);
                buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);
                buf_ptr_x8_3.store(to_ptr.add(2 * 8) as *mut u64);
                buf_ptr_x8_4.store(to_ptr.add(3 * 8) as *mut u64);
                buf_ptr_x8_5.store(to_ptr.add(4 * 8) as *mut u64);
                buf_ptr_x8_6.store(to_ptr.add(5 * 8) as *mut u64);
                buf_ptr_x8_7.store(to_ptr.add(6 * 8) as *mut u64);
                buf_ptr_x8_8.store(to_ptr.add(7 * 8) as *mut u64);

                to_ptr = to_ptr.add(64);
                from_index = from_index.add(64);
                len -= 64;
            }

            // Tail: 32 indices at once.
            if likely(len >= 32) {
                let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
                let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));
                let mut from_index_x8_3 = u64x8::from_u32_ptr(from_index.add(2 * 8));
                let mut from_index_x8_4 = u64x8::from_u32_ptr(from_index.add(3 * 8));

                from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_3.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_4.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

                let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
                let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);
                let buf_ptr_x8_3 = from_index_x8_3.add_u64(buffer_mem_start);
                let buf_ptr_x8_4 = from_index_x8_4.add_u64(buffer_mem_start);

                buf_ptr_x8_1.store(to_ptr as *mut u64);
                buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);
                buf_ptr_x8_3.store(to_ptr.add(2 * 8) as *mut u64);
                buf_ptr_x8_4.store(to_ptr.add(3 * 8) as *mut u64);

                to_ptr = to_ptr.add(32);
                from_index = from_index.add(32);
                len -= 32;
            }

            // Tail: 16 indices at once.
            if likely(len >= 16) {
                let mut from_index_x8_1 = u64x8::from_u32_ptr(from_index);
                let mut from_index_x8_2 = u64x8::from_u32_ptr(from_index.add(8));

                from_index_x8_1.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                from_index_x8_2.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();

                let buf_ptr_x8_1 = from_index_x8_1.add_u64(buffer_mem_start);
                let buf_ptr_x8_2 = from_index_x8_2.add_u64(buffer_mem_start);

                buf_ptr_x8_1.store(to_ptr as *mut u64);
                buf_ptr_x8_2.store(to_ptr.add(8) as *mut u64);

                to_ptr = to_ptr.add(16);
                from_index = from_index.add(16);
                len -= 16;
            }

            // Final group of 8 (len was rounded to a multiple of 8, so at
            // this point len is either 0 or exactly 8).
            if likely(len > 0) {
                let mut from_index_x8 = u64x8::from_u32_ptr(from_index);
                from_index_x8.shift_elements_left::<CLIB_LOG2_CACHE_LINE_BYTES>();
                let buf_ptr_x8 = from_index_x8.add_u64(buffer_mem_start);
                buf_ptr_x8.store(to_ptr as *mut u64);
            }

            // Expose exactly the requested count; any padding slots written
            // past len() are discarded here.
            to.set_len(from_indices.len());
        }
    }

    /// Hands a batch of buffers to their next nodes via VPP's
    /// `buffer_enqueue_to_next` function pointer.
    ///
    /// # Safety
    ///
    /// `from` and `nexts` must be parallel arrays of valid buffer indices
    /// and valid next-node indices for `node`; the function pointer in
    /// `vlib_buffer_func_main` must be initialized (it is read with
    /// `unwrap_unchecked`).
    #[inline(always)]
    pub unsafe fn buffer_enqueue_to_next<N: Node, V: VectorBufferIndex>(
        &self,
        node: &mut NodeRuntimeRef<N>,
        from: &[V],
        nexts: &[u16],
    ) {
        debug_assert_eq!(from.len(), nexts.len());
        unsafe {
            (vlib_buffer_func_main
                .buffer_enqueue_to_next_fn
                .unwrap_unchecked())(
                self.as_ptr(),
                node.as_ptr(),
                VectorBufferIndex::as_u32_slice(from).as_ptr().cast_mut(),
                // C API takes mutable pointers although it conceptually
                // consumes these arrays.
                nexts.as_ptr() as *mut u16,
                from.len() as u64,
            )
        }
    }

    /// Allocates one buffer and wraps it in an RAII handle that frees it on
    /// drop. Returns `Err(BufferAllocError)` if VPP could not allocate.
    #[cfg(feature = "experimental")]
    pub fn alloc_buffer(&self) -> Result<BufferWithContext<'_>, BufferAllocError> {
        unsafe {
            let mut buffer = 0;
            // The helper returns the number of buffers actually allocated.
            let res = vlib_helper_buffer_alloc(self.as_ptr(), &mut buffer, 1);
            if res == 1 {
                Ok(BufferWithContext::from_parts(buffer, self))
            } else {
                Err(BufferAllocError)
            }
        }
    }
}
732
#[cfg(test)]
mod tests {
    use arrayvec::ArrayVec;

    use crate::{
        bindings::{CLIB_LOG2_CACHE_LINE_BYTES, vlib_buffer_main_t, vlib_buffer_t, vlib_main_t},
        vlib::{MainRef, node::FRAME_SIZE},
    };

    /// `get_buffers` must translate every index back to the matching buffer
    /// pointer. 119 buffers deliberately exercises the unrolled 64/32/16/8
    /// groups plus a non-multiple-of-8 remainder (119 = 64 + 32 + 16 + 7,
    /// rounded up to a final group of 8).
    #[test]
    fn get_buffers() {
        let buffer = vlib_buffer_t::default();
        let buffers = [buffer; 119];
        // Index i maps to byte offset i * sizeof(vlib_buffer_t): indices are
        // expressed in cache-line units, so scale by size >> line-log2.
        let buffer_indices: ArrayVec<u32, 128> = (0..buffers.len() as u32)
            .map(|n| {
                n * (std::mem::size_of::<vlib_buffer_t>() as u32 >> CLIB_LOG2_CACHE_LINE_BYTES)
            })
            .collect();
        // Minimal fake buffer_main pointing at the stack array above.
        let mut buffer_main = vlib_buffer_main_t {
            buffer_mem_start: std::ptr::addr_of!(buffers) as u64,
            buffer_mem_size: std::mem::size_of_val(&buffers) as u64,
            ..vlib_buffer_main_t::default()
        };
        let mut main = vlib_main_t {
            buffer_main: std::ptr::addr_of_mut!(buffer_main),
            ..vlib_main_t::default()
        };
        unsafe {
            let mut to = ArrayVec::new();
            let main_ref = MainRef::from_ptr_mut(std::ptr::addr_of_mut!(main));
            // FRAME_SIZE is a multiple of 8, so this takes the batched path.
            main_ref.get_buffers::<(), FRAME_SIZE>(&buffer_indices, &mut to);
            let expected: Vec<&vlib_buffer_t> = buffers.iter().collect();
            assert_eq!(to.len(), expected.len());
            for (i, buf_ref) in to.iter().enumerate() {
                assert!(
                    buf_ref.as_ptr().cast_const() == std::ptr::addr_of!(buffers[i]),
                    "Buffer index {i} pointers don't match: {:p} expected {:p}",
                    buf_ref.as_ptr(),
                    std::ptr::addr_of!(buffers[i])
                );
            }
        }
    }
}