use std::mem;
use std::ptr;

use byteorder::{ByteOrder, NativeEndian};

use crate::core::hardware_address::HardwareAddress;
use crate::errors::{NetlinkError, NetlinkErrorKind, Result};

/// Copies `length` bytes from `src` into `dst`.
///
/// Both slices are asserted to be at least `length` bytes long before the
/// copy, so the unsafe `copy_nonoverlapping` below cannot read or write out
/// of bounds.
#[inline]
pub(crate) fn slice_copy(src: &[u8], dst: &mut [u8], length: usize) {
    assert!(src.len() >= length);
    assert!(dst.len() >= length);
    unsafe {
        ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), length);
    }
}

/// Types that can be unpacked from a buffer of native-endian bytes.
pub trait NativeUnpack: Sized {
    /// Unpacks a value from `buffer`, discarding the consumed size.
    fn unpack(buffer: &[u8]) -> Result<Self> {
        Self::unpack_with_size(buffer).map(|r| r.1)
    }
    /// Unpacks a value from `buffer`, returning the number of bytes
    /// consumed together with the value.
    fn unpack_with_size(buffer: &[u8]) -> Result<(usize, Self)> {
        let size = mem::size_of::<Self>();
        if buffer.len() < size {
            return Err(NetlinkError::new(NetlinkErrorKind::NotEnoughData).into());
        }
        Ok((size, Self::unpack_unchecked(buffer)))
    }
    /// Unpacks a value from `buffer` without checking its length first;
    /// implementations may panic on short input.
    fn unpack_unchecked(buffer: &[u8]) -> Self;
}

impl NativeUnpack for u8 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        buffer[0]
    }
}
impl NativeUnpack for i8 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        buffer[0] as i8
    }
}
impl NativeUnpack for u16 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_u16(buffer)
    }
}
impl NativeUnpack for i16 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_i16(buffer)
    }
}
impl NativeUnpack for u32 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_u32(buffer)
    }
}
impl NativeUnpack for i32 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_i32(buffer)
    }
}
impl NativeUnpack for u64 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_u64(buffer)
    }
}
impl NativeUnpack for i64 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_i64(buffer)
    }
}
impl NativeUnpack for f32 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_f32(buffer)
    }
}
impl NativeUnpack for f64 {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        NativeEndian::read_f64(buffer)
    }
}
impl NativeUnpack for HardwareAddress {
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        HardwareAddress::from(&buffer[0..6])
    }
}
impl NativeUnpack for Vec<u8> {
    fn unpack(buffer: &[u8]) -> Result<Self> {
        Ok(Self::unpack_unchecked(buffer))
    }
    fn unpack_with_size(buffer: &[u8]) -> Result<(usize, Self)> {
        Ok((buffer.len(), Self::unpack_unchecked(buffer)))
    }
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        buffer.to_vec()
    }
}
impl NativeUnpack for Vec<u32> {
    fn unpack_with_size(buffer: &[u8]) -> Result<(usize, Self)> {
        let t_size = mem::size_of::<u32>();
        // Trailing bytes that do not form a whole u32 are not consumed.
        let count = buffer.len() / t_size;
        let mut vec = Vec::with_capacity(count);
        for o in 0..count {
            let offset = o * t_size;
            vec.push(u32::unpack_unchecked(&buffer[offset..offset + t_size]))
        }
        Ok((count * t_size, vec))
    }
    fn unpack_unchecked(buffer: &[u8]) -> Self {
        // unpack_with_size for Vec<u32> never fails, so this cannot panic.
        Self::unpack_with_size(buffer).unwrap().1
    }
}

/// Types that can be packed into a buffer of native-endian bytes.
pub trait NativePack: Sized {
    /// Returns the number of bytes this value occupies when packed.
    fn pack_size(&self) -> usize;
    /// Packs this value into the front of `buffer`, returning the
    /// remaining, unwritten portion of the buffer.
    fn pack<'a>(&self, buffer: &'a mut [u8]) -> Result<&'a mut [u8]> {
        let type_size = self.pack_size();
        if buffer.len() < type_size {
            return Err(NetlinkError::new(NetlinkErrorKind::NotEnoughData).into());
        }
        self.pack_unchecked(buffer);
        Ok(&mut buffer[type_size..])
    }
    /// Packs this value into `buffer` without checking its length first;
    /// callers must ensure `buffer` is at least `pack_size()` bytes long.
    fn pack_unchecked(&self, buffer: &mut [u8]);
}

impl NativePack for u8 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        buffer[0] = *self;
    }
}
impl NativePack for i8 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        buffer[0] = *self as u8;
    }
}
impl NativePack for u16 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_u16(buffer, *self);
    }
}
impl NativePack for i16 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_i16(buffer, *self);
    }
}
impl NativePack for u32 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_u32(buffer, *self);
    }
}
impl NativePack for i32 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_i32(buffer, *self);
    }
}
impl NativePack for u64 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_u64(buffer, *self);
    }
}
impl NativePack for i64 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_i64(buffer, *self);
    }
}
impl NativePack for f32 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_f32(buffer, *self);
    }
}
impl NativePack for f64 {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        NativeEndian::write_f64(buffer, *self);
    }
}
impl NativePack for HardwareAddress {
    fn pack_size(&self) -> usize {
        mem::size_of::<Self>()
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        // Safety: the checked `pack` entry point guarantees that `buffer`
        // holds at least the six bytes of a hardware address.
        unsafe {
            ptr::copy_nonoverlapping(self.as_ptr(), buffer.as_mut_ptr(), 6);
        }
    }
}
impl NativePack for Vec<u8> {
    fn pack_size(&self) -> usize {
        self.len()
    }
    fn pack<'a>(&self, buffer: &'a mut [u8]) -> Result<&'a mut [u8]> {
        let size = self.len();
        if buffer.len() < size {
            return Err(NetlinkError::new(NetlinkErrorKind::NotEnoughData).into());
        }
        self.pack_unchecked(buffer);
        Ok(&mut buffer[size..])
    }
    fn pack_unchecked(&self, buffer: &mut [u8]) {
        slice_copy(self, buffer, self.len());
    }
}

/// Packs every element of `v` into `buffer` back to back, returning the
/// total number of bytes written.
pub fn pack_vec<T: NativePack>(buffer: &mut [u8], v: &[T]) -> Result<usize> {
    let mut size = 0usize;
    let mut slice = buffer;
    for i in v {
        slice = i.pack(slice)?;
        size += i.pack_size();
    }
    Ok(size)
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::cmp;
    use std::fmt;

    // Round-trips `value` through unpack/pack and checks the size
    // accounting, the error path for short buffers, and that packing
    // reports the unused remainder of an oversized buffer.
    fn pack_unpack_test<T>(bytes: &[u8], value: T)
    where
        T: NativePack + NativeUnpack + fmt::Debug + cmp::PartialEq + Sized,
    {
        let value_size = mem::size_of::<T>();
        assert_eq!(bytes.len(), value_size);
        let (unpacked_size, unpacked_value) = T::unpack_with_size(bytes).unwrap();
        assert_eq!(unpacked_size, value_size);
        assert_eq!(unpacked_value, value);
        let unpacked_value = T::unpack(bytes).unwrap();
        assert!(T::unpack(&bytes[..value_size - 1]).is_err());
        assert_eq!(unpacked_value, value);
        let unpacked_value = T::unpack_unchecked(bytes);
        assert_eq!(unpacked_value, value);
        let mut buffer = vec![0u8; value_size];
        {
            let left = value.pack(&mut buffer).unwrap();
            assert_eq!(left.len(), 0);
        }
        assert_eq!(buffer, bytes);
        let mut buffer = vec![0xccu8; value_size - 1];
        assert!(value.pack(&mut buffer).is_err());
        let mut buffer = vec![0u8; value_size + 2];
        {
            let left = value.pack(&mut buffer).unwrap();
            assert_eq!(left.len(), 2);
        }
    }

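    // Illustrative only: a minimal sketch of how a custom fixed-size type
    // could implement the two traits by delegating to the integer impls.
    // `Pair` is hypothetical and not part of the crate's API.
    #[derive(Debug, PartialEq)]
    struct Pair {
        a: u16,
        b: u16,
    }
    impl NativeUnpack for Pair {
        fn unpack_unchecked(buffer: &[u8]) -> Self {
            Pair {
                a: u16::unpack_unchecked(&buffer[0..2]),
                b: u16::unpack_unchecked(&buffer[2..4]),
            }
        }
    }
    impl NativePack for Pair {
        fn pack_size(&self) -> usize {
            mem::size_of::<Self>()
        }
        fn pack_unchecked(&self, buffer: &mut [u8]) {
            self.a.pack_unchecked(&mut buffer[0..2]);
            self.b.pack_unchecked(&mut buffer[2..4]);
        }
    }

    #[test]
    fn pack_unpack_custom_struct() {
        pack_unpack_test(
            &[0x01, 0x00, 0x02, 0x00],
            Pair {
                a: 1u16.to_le(),
                b: 2u16.to_le(),
            },
        );
    }
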
    #[test]
    fn pack_unpack_u8() {
        pack_unpack_test(&[0x5a], 0x5au8);
    }

    #[test]
    fn pack_unpack_i8() {
        pack_unpack_test(&[0xa5], -91i8);
    }

    #[test]
    fn pack_unpack_u16() {
        pack_unpack_test(&[0x22, 0xaa], 0xaa22u16.to_le());
    }

    #[test]
    fn pack_unpack_i16() {
        pack_unpack_test(&[0x55, 0xaa], (-21931i16).to_le());
    }

    #[test]
    fn pack_unpack_u32() {
        pack_unpack_test(&[0x44, 0x33, 0x22, 0x11], 0x11223344u32.to_le());
    }

    #[test]
    fn pack_unpack_i32() {
        pack_unpack_test(&[0x11, 0x22, 0x33, 0xa4], (-1540152815i32).to_le());
    }

    #[test]
    fn pack_unpack_u64() {
        pack_unpack_test(
            &[0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11],
            0x1122334455667788u64.to_le(),
        );
    }

    #[test]
    fn pack_unpack_i64() {
        pack_unpack_test(
            &[0x11, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x88],
            (-8637284766759618799i64).to_le(),
        );
    }

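    // Float coverage, added alongside the integer tests above. The byte
    // patterns are IEEE-754 1.0; `from_bits` plus `to_le()` keeps the
    // tests endian-portable, mirroring the integer cases.
    #[test]
    fn pack_unpack_f32() {
        pack_unpack_test(
            &[0x00, 0x00, 0x80, 0x3f],
            f32::from_bits(0x3f800000u32.to_le()),
        );
    }

    #[test]
    fn pack_unpack_f64() {
        pack_unpack_test(
            &[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0x3f],
            f64::from_bits(0x3ff0000000000000u64.to_le()),
        );
    }
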
    #[test]
    fn pack_unpack_hardware_address() {
        let bytes = vec![0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff];
        let hwa = HardwareAddress::from(bytes.as_slice());
        pack_unpack_test(bytes.as_slice(), hwa);
    }

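    // Extra coverage for the variable-length impls; these tests are
    // additions, not part of the original suite.
    #[test]
    fn pack_unpack_u8_vec() {
        let bytes = [0x11u8, 0x22, 0x33];
        let v = Vec::<u8>::unpack(&bytes).unwrap();
        assert_eq!(v, bytes);
        let mut buffer = vec![0u8; v.pack_size()];
        let left_len = v.pack(&mut buffer).unwrap().len();
        assert_eq!(left_len, 0);
        assert_eq!(buffer, bytes);
    }

    #[test]
    fn unpack_u32_vec_ignores_trailing_bytes() {
        // Two whole u32 values plus two leftover bytes; the leftovers do
        // not form a full u32 and are not consumed.
        let bytes = [
            0x44, 0x33, 0x22, 0x11, 0x88, 0x77, 0x66, 0x55, 0xaa, 0xbb,
        ];
        let (size, v) = Vec::<u32>::unpack_with_size(&bytes).unwrap();
        assert_eq!(size, 8);
        assert_eq!(v, vec![0x11223344u32.to_le(), 0x55667788u32.to_le()]);
    }
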
    #[test]
    fn pack_unpack_any_vec() {
        let v = vec![1u16.to_le(), 2u16.to_le()];
        let mut buffer = vec![0u8; mem::size_of::<u16>() * v.len()];
        let size = pack_vec(&mut buffer, &v).unwrap();
        assert_eq!(size, 4usize);
        assert_eq!(buffer, &[0x01, 0x00, 0x02, 0x00]);

        let v = vec![1u32.to_le(), 2u32.to_le()];
        let mut buffer = vec![0u8; mem::size_of::<u32>() * v.len()];
        let size = pack_vec(&mut buffer, &v).unwrap();
        assert_eq!(size, 8usize);
        assert_eq!(buffer, &[0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00]);
    }
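
    // A further check, added here: pack_vec should propagate the
    // NotEnoughData error when the buffer cannot hold every element.
    #[test]
    fn pack_vec_not_enough_space() {
        let v = vec![1u16, 2u16];
        // Three bytes can hold the first u16 but not the second.
        let mut buffer = vec![0u8; 3];
        assert!(pack_vec(&mut buffer, &v).is_err());
    }
}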
357}