// vpp_plugin/vlib/node_generic.rs

use std::mem::MaybeUninit;

use arrayvec::ArrayVec;

use crate::{
    vlib::{
        self, BufferIndex, MainRef,
        buffer::BufferRef,
        node::{FRAME_SIZE, FrameRef, NextNodes, Node, NodeRuntimeRef},
    },
    vppinfra::{likely, unlikely},
};
18
/// Outcome of a feature node's per-buffer dispatch decision.
///
/// A feature-node implementation either names one of its own declared
/// next nodes explicitly, or defers to the buffer's feature arc (the
/// `NextFeature` case is resolved by the generic loops below via
/// `BufferRef::vnet_feature_next()`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum FeatureNextNode<NextNode> {
    /// Send the buffer to this specific next node.
    DefinedNode(NextNode),
    /// Let the buffer continue along its feature arc; the generic loop
    /// looks the actual next index up on the buffer itself.
    NextFeature,
}
27
28impl<NextNode> From<NextNode> for FeatureNextNode<NextNode> {
29 fn from(value: NextNode) -> Self {
30 Self::DefinedNode(value)
31 }
32}
33
/// Per-buffer (scalar) dispatch logic for a generic feature node.
///
/// Implementations supply only the mapping from one buffer to its next
/// node; the surrounding frame handling is provided by
/// [`generic_feature_node_x1`].
pub trait GenericFeatureNodeX1<N: Node> {
    /// Decide where buffer `b0` goes next: either a concrete next node,
    /// or [`FeatureNextNode::NextFeature`] to follow the feature arc.
    ///
    /// # Safety
    /// Called from inside the node dispatch loop with a buffer belonging
    /// to the frame currently being processed.
    /// NOTE(review): the precise unsafe contract is not visible in this
    /// file — confirm against the implementations and `BufferRef`'s docs.
    unsafe fn map_buffer_to_next(
        &self,
        vm: &MainRef,
        node: &mut NodeRuntimeRef<N>,
        b0: &mut BufferRef<N::FeatureData>,
    ) -> FeatureNextNode<N::NextNodes>;
}
48
/// Generic scalar dispatch loop for a feature node.
///
/// Fetches all of `frame`'s buffers, asks `generic_node_impl` for each
/// buffer's next node (resolving [`FeatureNextNode::NextFeature`] through
/// the buffer's feature arc), then enqueues the whole frame in a single
/// `buffer_enqueue_to_next` call. Returns the frame's vector length.
///
/// # Safety
/// `frame` must be the frame currently being dispatched to `node`, with
/// at most `FRAME_SIZE` vectors.
/// NOTE(review): the exact caller contract is inherited from
/// `FrameRef::get_buffers` / `buffer_enqueue_to_next` — confirm there.
#[inline(always)]
pub unsafe fn generic_feature_node_x1<GenericNode, N, FeatureData>(
    vm: &MainRef,
    node: &mut NodeRuntimeRef<N>,
    frame: &mut FrameRef<N>,
    generic_node_impl: GenericNode,
) -> u16
where
    N: Node<Vector = BufferIndex, Scalar = (), Aux = (), FeatureData = FeatureData>,
    GenericNode: GenericFeatureNodeX1<N>,
    FeatureData: Copy,
{
    unsafe {
        // One next-node index per buffer. Only the first `b.len()` entries
        // are ever written, and only those are read back below.
        let mut nexts: [MaybeUninit<u16>; FRAME_SIZE] = [MaybeUninit::uninit(); FRAME_SIZE];
        let mut b = ArrayVec::new();

        // Resolve the frame's buffer indices into buffer references
        // (at most FRAME_SIZE of them); `from` is passed through to the
        // enqueue call below.
        let from = frame.get_buffers::<FRAME_SIZE>(vm, &mut b);

        for (i, b0) in b.iter_mut().enumerate() {
            let next = generic_node_impl.map_buffer_to_next(vm, node, b0);
            let next = match next {
                // Follow the feature arc: the buffer itself supplies the
                // next-feature index.
                FeatureNextNode::NextFeature => b0.vnet_feature_next().0 as u16,
                FeatureNextNode::DefinedNode(next) => next.into_u16(),
            };
            // SAFETY: i < b.len() <= FRAME_SIZE, so the index is in bounds.
            nexts.get_unchecked_mut(i).write(next);
        }

        // SAFETY: the loop above initialized exactly the first `b.len()`
        // entries of `nexts`, and `b.len() <= FRAME_SIZE` bounds the slice.
        let initialized_nexts = nexts.get_unchecked(..b.len()).assume_init_ref();
        vm.buffer_enqueue_to_next(node, from, initialized_nexts);

        frame.vector().len() as u16
    }
}
96
/// Four-buffers-at-a-time dispatch logic for a generic feature node.
///
/// Extends [`GenericFeatureNodeX1`] so [`generic_feature_node_x4`] can
/// fall back to the scalar mapping for the (< 4 buffer) tail of a frame.
pub trait GenericFeatureNodeX4<N: Node>: GenericFeatureNodeX1<N> {
    /// Prefetch the headers of a group of four buffers.
    ///
    /// The generic loop calls this one group ahead of the group being
    /// mapped, so headers are warm by the time they are inspected.
    fn prefetch_buffer_x4(
        &self,
        _vm: &MainRef,
        _node: &mut NodeRuntimeRef<N>,
        b: &mut [&mut BufferRef<N::FeatureData>; 4],
    ) {
        // Deliberately unrolled: issue all four prefetches back to back.
        b[0].prefetch_header_load();
        b[1].prefetch_header_load();
        b[2].prefetch_header_load();
        b[3].prefetch_header_load();
    }

    /// Decide where each of four buffers goes next; element `k` of the
    /// result corresponds to `b[k]`.
    ///
    /// # Safety
    /// Same contract as [`GenericFeatureNodeX1::map_buffer_to_next`],
    /// applied to all four buffers.
    /// NOTE(review): the precise unsafe contract is not visible in this
    /// file — confirm against the implementations.
    unsafe fn map_buffer_to_next_x4(
        &self,
        vm: &MainRef,
        node: &mut NodeRuntimeRef<N>,
        b: &mut [&mut BufferRef<N::FeatureData>; 4],
    ) -> [FeatureNextNode<N::NextNodes>; 4];

    /// Record trace data for one traced buffer.
    ///
    /// Default is a no-op; override to emit per-buffer trace records.
    /// The generic loop only calls this when the node has tracing
    /// enabled and the buffer carries the traced flag.
    unsafe fn trace_buffer(
        &self,
        _vm: &MainRef,
        _node: &mut NodeRuntimeRef<N>,
        _b0: &mut BufferRef<N::FeatureData>,
    ) {
    }
}
143
/// Generic quad dispatch loop for a feature node.
///
/// Processes the frame's buffers four at a time — prefetching one group
/// ahead — falls back to the scalar
/// [`GenericFeatureNodeX1::map_buffer_to_next`] for the remainder,
/// optionally traces buffers, and enqueues the whole frame in a single
/// `buffer_enqueue_to_next` call. Returns the frame's vector length.
///
/// # Safety
/// Same caller contract as [`generic_feature_node_x1`]: `frame` must be
/// the frame currently being dispatched to `node`, with at most
/// `FRAME_SIZE` vectors.
#[inline(always)]
pub unsafe fn generic_feature_node_x4<GenericNode, N, FeatureData>(
    vm: &MainRef,
    node: &mut NodeRuntimeRef<N>,
    frame: &mut FrameRef<N>,
    generic_node_impl: GenericNode,
) -> u16
where
    N: Node<Vector = BufferIndex, Scalar = (), Aux = (), FeatureData = FeatureData>,
    GenericNode: GenericFeatureNodeX4<N>,
    FeatureData: Copy,
{
    unsafe {
        // One next-node index per buffer. Only the first `b.len()` entries
        // are ever written, and only those are read back at the end.
        let mut nexts: [MaybeUninit<u16>; FRAME_SIZE] = [MaybeUninit::uninit(); FRAME_SIZE];
        let mut b = ArrayVec::new();

        let from = frame.get_buffers::<FRAME_SIZE>(vm, &mut b);
        let len = b.len();

        // Main loop: whole groups of four buffers.
        for stride in 0..len / 4 {
            let i = stride * 4;

            // Prefetch the *next* group's headers while this group is
            // being mapped.
            if i + 8 <= len {
                // SAFETY: i + 8 <= len, so i+4..i+8 is in bounds of `b`.
                let stride_b = b.get_unchecked_mut(i + 4..i + 8);
                // SAFETY: the slice above is exactly 4 elements long.
                let stride_b = stride_b.as_mut_array::<4>().unwrap_unchecked();

                generic_node_impl.prefetch_buffer_x4(vm, node, stride_b);
            }

            // SAFETY: stride < len / 4, so i..i+4 is in bounds of `b`
            // (len elements) and of `nexts` (FRAME_SIZE >= len); each
            // slice is exactly 4 elements long.
            let stride_b = b.get_unchecked_mut(i..i + 4);
            let stride_nexts = nexts.get_unchecked_mut(i..i + 4);
            let stride_b = stride_b.as_mut_array::<4>().unwrap_unchecked();
            let stride_nexts = stride_nexts.as_mut_array::<4>().unwrap_unchecked();

            // Speculatively record each buffer's feature-arc next index;
            // entries are overwritten below wherever the implementation
            // returns a concrete next node instead.
            if likely(
                stride_b[0].vnet_buffer().feature_arc_index()
                    == stride_b[1].vnet_buffer().feature_arc_index()
                    && stride_b[0].vnet_buffer().feature_arc_index()
                        == stride_b[2].vnet_buffer().feature_arc_index()
                    && stride_b[0].vnet_buffer().feature_arc_index()
                        == stride_b[3].vnet_buffer().feature_arc_index(),
            ) {
                // Fast path: all four buffers sit on the same feature arc,
                // so the next-feature index is resolved once (on the first
                // buffer) and shared by the group.
                // NOTE(review): this assumes equal arc indices imply equal
                // next-feature indices, and that any per-buffer state
                // `vnet_feature_next()` may update is not needed for
                // buffers 1-3 here (the scalar tail below does call it
                // per buffer) — confirm against its semantics.
                let feature_next = stride_b[0].vnet_feature_next().0 as u16;
                stride_nexts[0].write(feature_next);
                stride_nexts[1].write(feature_next);
                stride_nexts[2].write(feature_next);
                stride_nexts[3].write(feature_next);
            } else {
                // Mixed arcs: resolve the next-feature index per buffer.
                stride_nexts[0].write(stride_b[0].vnet_feature_next().0 as u16);
                stride_nexts[1].write(stride_b[1].vnet_feature_next().0 as u16);
                stride_nexts[2].write(stride_b[2].vnet_feature_next().0 as u16);
                stride_nexts[3].write(stride_b[3].vnet_feature_next().0 as u16);
            };

            let feat_nexts = generic_node_impl.map_buffer_to_next_x4(vm, node, stride_b);

            for (next_i, next) in feat_nexts.into_iter().enumerate() {
                match next {
                    // Keep the speculative feature-arc next written above.
                    FeatureNextNode::NextFeature => { }
                    FeatureNextNode::DefinedNode(next) => {
                        stride_nexts[next_i].write(next.into_u16());
                    }
                };
            }
        }

        // Scalar tail: the up-to-three leftover buffers use the x1 mapping.
        for i in (len / 4) * 4..len {
            // SAFETY: i < len == b.len().
            let b0 = b.get_unchecked_mut(i);

            // Default to the feature-arc next unless the implementation
            // picks a concrete next node.
            let mut next_val = b0.vnet_feature_next().0 as u16;

            let next = generic_node_impl.map_buffer_to_next(vm, node, b0);
            match next {
                FeatureNextNode::NextFeature => { }
                FeatureNextNode::DefinedNode(next) => next_val = next.into_u16(),
            };
            // SAFETY: i < len <= FRAME_SIZE.
            nexts.get_unchecked_mut(i).write(next_val);
        }

        // Tracing is kept off the fast path: walk the buffers only when
        // the node has tracing enabled, and trace only flagged buffers.
        if unlikely(node.flags().contains(vlib::node::NodeFlags::TRACE)) {
            for b0 in &mut b {
                if b0.flags().contains(vlib::BufferFlags::IS_TRACED) {
                    generic_node_impl.trace_buffer(vm, node, b0);
                }
            }
        }

        // SAFETY: the group loop and the scalar tail together initialized
        // every entry in 0..len exactly once, and len == b.len().
        let initialized_nexts = nexts.get_unchecked(..b.len()).assume_init_ref();
        vm.buffer_enqueue_to_next(node, from, initialized_nexts);

        frame.vector().len() as u16
    }
}