in_situ/
lib.rs

//! In Situ Endian-independent Bytes Access
//!
//! # Feature Gates
//!
//!   * `bytes`: For abstracting `Bytes` and `BytesMut`.
//!   * `bstr`: For complementing [`InSitu::utf8()`] with `InSitu::bstr()`.

#[cfg(feature = "bstr")]
pub use bstr;
#[cfg(feature = "bstr")]
use bstr::BStr;
pub use byteorder;
#[cfg(feature = "bytes")]
pub use bytes;

use byteorder::{BE, ByteOrder, LE, NativeEndian};
use std::{fmt::Debug, hash::Hash, mem, str::Utf8Error};

/// Size of [`u8`] in bytes.
pub const U8: usize = 1;
/// Size of [`u16`] in bytes.
pub const U16: usize = 2;
/// Size of `u24` in bytes.
pub const U24: usize = 3;
/// Size of [`u32`] in bytes.
pub const U32: usize = 4;
/// Size of [`u64`] in bytes.
pub const U64: usize = 8;
/// Size of [`u128`] in bytes.
pub const U128: usize = 16;
/// Size of [`i8`] in bytes.
pub const I8: usize = 1;
/// Size of [`i16`] in bytes.
pub const I16: usize = 2;
/// Size of `i24` in bytes.
pub const I24: usize = 3;
/// Size of [`i32`] in bytes.
pub const I32: usize = 4;
/// Size of [`i64`] in bytes.
pub const I64: usize = 8;
/// Size of [`i128`] in bytes.
pub const I128: usize = 16;
/// Size of [`f32`] in bytes.
pub const F32: usize = 4;
/// Size of [`f64`] in bytes.
pub const F64: usize = 8;

/// Calculates the padding in bytes required to align `offset` to `align`.
///
/// `align` must be a power of two. Leverages two's complement shortcuts instead of branching and
/// modulo operations.
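///
/// # Examples
///
/// A short sketch of the intended use (assuming the crate is published as `in_situ`):
///
/// ```
/// // 3 bytes of padding align offset 5 to the next 4-byte boundary.
/// assert_eq!(in_situ::padding(5, 4), 3);
/// // Already aligned offsets need no padding.
/// assert_eq!(in_situ::padding(8, 4), 0);
/// ```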
#[must_use]
#[allow(clippy::cast_sign_loss, clippy::cast_possible_wrap)]
pub fn padding(offset: usize, align: usize) -> usize {
    let padding = -(offset as isize) as usize & (align - 1);
    debug_assert_eq!(padding, (align - offset % align) % align);
    padding
}

/// Calculates the `align`ed `offset` in bytes, i.e., rounds `offset` up to the next multiple of
/// `align`.
///
/// `align` must be a power of two. Leverages two's complement shortcuts instead of branching and
/// modulo operations.
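///
/// # Examples
///
/// A short sketch of the intended use (assuming the crate is published as `in_situ`):
///
/// ```
/// // Offset 5 rounds up to the next 4-byte boundary.
/// assert_eq!(in_situ::aligned(5, 4), 8);
/// // Aligned offsets are passed through unchanged.
/// assert_eq!(in_situ::aligned(8, 4), 8);
/// ```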
#[must_use]
#[allow(clippy::cast_sign_loss, clippy::cast_possible_wrap)]
pub fn aligned(offset: usize, align: usize) -> usize {
    let aligned = (offset + align - 1) & -(align as isize) as usize;
    debug_assert_eq!(aligned, offset + padding(offset, align));
    aligned
}

/// Provides endian-independent immutable bytes access.
///
/// Requires methods to be implemented that detect or hardcode the word size and endianness. This
/// trait requires the <code>[AsRef]\<\[[u8]\]\></code> trait to access slices of generic types. It
/// is not implemented for the [`Raw`] trait but for its wrapper types instead, since each wrapper
/// might implement the endianness detection differently. The generic type parameter `Scope` allows
/// defining the trait's visibility, e.g., by assigning a private type instead of the public default
/// type parameter `Scope = ()`.
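///
/// # Examples
///
/// A minimal sketch of an implementor; the `Packet` wrapper below is hypothetical and only serves
/// to illustrate a little-endian view over 4-byte words accessed via big-endian offsets (assuming
/// the crate is published as `in_situ`):
///
/// ```
/// use in_situ::InSitu;
///
/// struct Packet<'a>(&'a [u8]);
///
/// impl AsRef<[u8]> for Packet<'_> {
///     fn as_ref(&self) -> &[u8] {
///         self.0
///     }
/// }
///
/// impl InSitu for Packet<'_> {
///     fn swap_size(&self) -> usize {
///         4
///     }
///     fn is_be(&self) -> bool {
///         false
///     }
/// }
///
/// // The logical value 0x1234_5678 stored as one little-endian 4-byte word.
/// let raw: [u8; 4] = [0x78, 0x56, 0x34, 0x12];
/// let packet = Packet(&raw);
/// assert_eq!(packet.u32(0), 0x1234_5678);
/// // Big-endian offset 0 addresses the most significant half-word.
/// assert_eq!(packet.u16(0), 0x1234);
/// assert_eq!(packet.u16(2), 0x5678);
/// ```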
pub trait InSitu<Scope = ()>: AsRef<[u8]> {
    /// The word size of the slice as required by [`Self::at()`], not to be confused with the
    /// various word sizes used to access the slice. Return `0` if [`Self::is_le()`] does not
    /// affect the offsets, i.e., the offsets are big-endian regardless of [`Self::is_le()`].
    fn swap_size(&self) -> usize;
    /// Whether the underlying bytes are in big-endian (BE) or little-endian (LE) byte order.
    fn is_be(&self) -> bool;
    /// Inversion of [`Self::is_be()`].
    fn is_le(&self) -> bool {
        !self.is_be()
    }
    /// Tests if the underlying byte order has the machine's native endianness.
    fn is_native(&self) -> bool {
        self.is_be() == (NativeEndian::read_u16(&[0, 1]) == 1)
    }
    /// Converts [`Self::is_be()`] and [`Self::is_le()`] into an [`Order`].
    fn order(&self) -> Order {
        if self.is_be() { Order::BE } else { Order::LE }
    }
    /// If [`Self::is_le()`], translates the big-endian `offset` of a word of `word_size` in a
    /// slice of [`Self::swap_size()`] into its little-endian counterpart via bitwise instead of
    /// branching and modulo operations, otherwise passes `offset` through.
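    ///
    /// # Examples
    ///
    /// A worked translation; the `Le4` wrapper below is hypothetical, standing in for any
    /// little-endian implementor with a swap size of four bytes:
    ///
    /// ```
    /// use in_situ::{InSitu, U8, U16, U32};
    ///
    /// struct Le4;
    ///
    /// impl AsRef<[u8]> for Le4 {
    ///     fn as_ref(&self) -> &[u8] {
    ///         &[]
    ///     }
    /// }
    ///
    /// impl InSitu for Le4 {
    ///     fn swap_size(&self) -> usize {
    ///         4
    ///     }
    ///     fn is_be(&self) -> bool {
    ///         false
    ///     }
    /// }
    ///
    /// // Big-endian byte offsets are mirrored within each 4-byte word ...
    /// assert_eq!(Le4.at(0, U8), 3);
    /// assert_eq!(Le4.at(0, U16), 2);
    /// // ... while word-sized accesses pass through unchanged.
    /// assert_eq!(Le4.at(0, U32), 0);
    /// ```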
    fn at(&self, offset: usize, word_size: usize) -> usize {
        if self.is_be() || self.swap_size() < word_size {
            offset
        } else {
            offset ^ (self.swap_size() - word_size)
        }
    }
    /// Gets [`&str`] if UTF-8 in slice of [`Self::swap_size()`] at big-endian `offset`
    /// endian-independently.
    ///
    /// # Errors
    ///
    /// Returns a [`Utf8Error`] describing why the slice is not valid UTF-8.
    fn utf8(&self, offset: usize, length: usize) -> Result<&str, Utf8Error> {
        std::str::from_utf8(&self.as_ref()[offset..][..length])
    }
    /// Gets [`BStr`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    #[cfg(feature = "bstr")]
    fn bstr(&self, offset: usize, length: usize) -> &BStr {
        BStr::new(&self.as_ref()[offset..][..length])
    }
    /// Gets [`bool`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn bool(&self, offset: usize) -> bool {
        self.u8(offset) != 0
    }
    /// Gets [`u8`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn u8(&self, offset: usize) -> u8 {
        let offset = self.at(offset, U8);
        self.as_ref()[offset]
    }
    /// Gets [`u16`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn u16(&self, offset: usize) -> u16 {
        let offset = self.at(offset, U16);
        if self.is_be() {
            BE::read_u16(&self.as_ref()[offset..])
        } else {
            LE::read_u16(&self.as_ref()[offset..])
        }
    }
    /// Gets `u24` as [`u32`] in slice of [`Self::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn u24(&self, offset: usize) -> u32 {
        let offset = self.at(offset, U24);
        if self.is_be() {
            BE::read_u24(&self.as_ref()[offset..])
        } else {
            LE::read_u24(&self.as_ref()[offset..])
        }
    }
    /// Gets [`u32`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn u32(&self, offset: usize) -> u32 {
        let offset = self.at(offset, U32);
        if self.is_be() {
            BE::read_u32(&self.as_ref()[offset..])
        } else {
            LE::read_u32(&self.as_ref()[offset..])
        }
    }
    /// Gets [`u64`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn u64(&self, offset: usize) -> u64 {
        let offset = self.at(offset, U64);
        if self.is_be() {
            BE::read_u64(&self.as_ref()[offset..])
        } else {
            LE::read_u64(&self.as_ref()[offset..])
        }
    }
    /// Gets [`u128`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn u128(&self, offset: usize) -> u128 {
        let offset = self.at(offset, U128);
        if self.is_be() {
            BE::read_u128(&self.as_ref()[offset..])
        } else {
            LE::read_u128(&self.as_ref()[offset..])
        }
    }
    /// Gets unsigned integer of `word_size <= 8` in slice of [`Self::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn uint(&self, offset: usize, word_size: usize) -> u64 {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::read_uint(&self.as_ref()[offset..], word_size)
        } else {
            LE::read_uint(&self.as_ref()[offset..], word_size)
        }
    }
    /// Gets unsigned integer of `word_size <= 16` in slice of [`Self::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn uint128(&self, offset: usize, word_size: usize) -> u128 {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::read_uint128(&self.as_ref()[offset..], word_size)
        } else {
            LE::read_uint128(&self.as_ref()[offset..], word_size)
        }
    }
    /// Gets [`i8`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    #[allow(clippy::cast_possible_wrap)]
    fn i8(&self, offset: usize) -> i8 {
        let offset = self.at(offset, I8);
        self.as_ref()[offset] as i8
    }
    /// Gets [`i16`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn i16(&self, offset: usize) -> i16 {
        let offset = self.at(offset, I16);
        if self.is_be() {
            BE::read_i16(&self.as_ref()[offset..])
        } else {
            LE::read_i16(&self.as_ref()[offset..])
        }
    }
    /// Gets `i24` as [`i32`] in slice of [`Self::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn i24(&self, offset: usize) -> i32 {
        let offset = self.at(offset, I24);
        if self.is_be() {
            BE::read_i24(&self.as_ref()[offset..])
        } else {
            LE::read_i24(&self.as_ref()[offset..])
        }
    }
    /// Gets [`i32`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn i32(&self, offset: usize) -> i32 {
        let offset = self.at(offset, I32);
        if self.is_be() {
            BE::read_i32(&self.as_ref()[offset..])
        } else {
            LE::read_i32(&self.as_ref()[offset..])
        }
    }
    /// Gets [`i64`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn i64(&self, offset: usize) -> i64 {
        let offset = self.at(offset, I64);
        if self.is_be() {
            BE::read_i64(&self.as_ref()[offset..])
        } else {
            LE::read_i64(&self.as_ref()[offset..])
        }
    }
    /// Gets [`i128`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn i128(&self, offset: usize) -> i128 {
        let offset = self.at(offset, I128);
        if self.is_be() {
            BE::read_i128(&self.as_ref()[offset..])
        } else {
            LE::read_i128(&self.as_ref()[offset..])
        }
    }
    /// Gets signed integer of `word_size <= 8` in slice of [`Self::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn int(&self, offset: usize, word_size: usize) -> i64 {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::read_int(&self.as_ref()[offset..], word_size)
        } else {
            LE::read_int(&self.as_ref()[offset..], word_size)
        }
    }
    /// Gets signed integer of `word_size <= 16` in slice of [`Self::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn int128(&self, offset: usize, word_size: usize) -> i128 {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::read_int128(&self.as_ref()[offset..], word_size)
        } else {
            LE::read_int128(&self.as_ref()[offset..], word_size)
        }
    }
    /// Gets [`f32`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn f32(&self, offset: usize) -> f32 {
        let offset = self.at(offset, F32);
        if self.is_be() {
            BE::read_f32(&self.as_ref()[offset..])
        } else {
            LE::read_f32(&self.as_ref()[offset..])
        }
    }
    /// Gets [`f64`] in slice of [`Self::swap_size()`] at big-endian `offset` endian-independently.
    fn f64(&self, offset: usize) -> f64 {
        let offset = self.at(offset, F64);
        if self.is_be() {
            BE::read_f64(&self.as_ref()[offset..])
        } else {
            LE::read_f64(&self.as_ref()[offset..])
        }
    }
}

/// Provides endian-independent mutable bytes access.
///
/// Requires the <code>[InSitu]\<Scope\></code> trait to know about endianness.
/// <code>[InSituMut]\<Scope\></code> is **not (yet)** auto-implemented for all
/// <code>[InSitu]\<Scope\> + [AsMut]\<\[[u8]\]\></code> implementors, as the trait name would
/// leak into the documentation under blanket implementations even for a private generic type
/// parameter `Scope`. This might be resolved in the far future by a language extension supporting
/// scoped trait implementations, or by fixing *rustdoc* if there are no loopholes that would allow
/// actually using the trait.
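///
/// # Examples
///
/// A minimal sketch of a manually implemented mutable view; the `PacketMut` wrapper below is
/// hypothetical and only serves to illustrate a little-endian view over 4-byte words (assuming the
/// crate is published as `in_situ`):
///
/// ```
/// use in_situ::{InSitu, InSituMut};
///
/// struct PacketMut(Vec<u8>);
///
/// impl AsRef<[u8]> for PacketMut {
///     fn as_ref(&self) -> &[u8] {
///         &self.0
///     }
/// }
///
/// impl AsMut<[u8]> for PacketMut {
///     fn as_mut(&mut self) -> &mut [u8] {
///         &mut self.0
///     }
/// }
///
/// impl InSitu for PacketMut {
///     fn swap_size(&self) -> usize {
///         4
///     }
///     fn is_be(&self) -> bool {
///         false
///     }
/// }
///
/// impl InSituMut for PacketMut {}
///
/// let mut packet = PacketMut(vec![0; 4]);
/// packet.set_u16(0, 0x1234);
/// packet.set_u16(2, 0x5678);
/// // Written as one little-endian 4-byte word, read back endian-independently.
/// assert_eq!(packet.0, [0x78, 0x56, 0x34, 0x12]);
/// assert_eq!(packet.u32(0), 0x1234_5678);
/// ```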
pub trait InSituMut<Scope = ()>: InSitu<Scope> + AsMut<[u8]> {
    /// Sets [`bool`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_bool(&mut self, offset: usize, value: bool) {
        self.set_u8(offset, value.into());
    }
    /// Sets [`u8`] in slice of [`InSitu::swap_size()`] at big-endian `offset` endian-independently.
    fn set_u8(&mut self, offset: usize, value: u8) {
        let at = self.at(offset, U8);
        self.as_mut()[at] = value;
    }
    /// Sets [`u16`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_u16(&mut self, offset: usize, value: u16) {
        let offset = self.at(offset, U16);
        if self.is_be() {
            BE::write_u16(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_u16(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets `u24` as [`u32`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_u24(&mut self, offset: usize, value: u32) {
        let offset = self.at(offset, U24);
        if self.is_be() {
            BE::write_u24(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_u24(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`u32`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_u32(&mut self, offset: usize, value: u32) {
        let offset = self.at(offset, U32);
        if self.is_be() {
            BE::write_u32(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_u32(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`u64`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_u64(&mut self, offset: usize, value: u64) {
        let offset = self.at(offset, U64);
        if self.is_be() {
            BE::write_u64(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_u64(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`u128`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_u128(&mut self, offset: usize, value: u128) {
        let offset = self.at(offset, U128);
        if self.is_be() {
            BE::write_u128(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_u128(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets unsigned integer of `word_size <= 8` in slice of [`InSitu::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn set_uint(&mut self, offset: usize, value: u64, word_size: usize) {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::write_uint(&mut self.as_mut()[offset..], value, word_size);
        } else {
            LE::write_uint(&mut self.as_mut()[offset..], value, word_size);
        }
    }
    /// Sets unsigned integer of `word_size <= 16` in slice of [`InSitu::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn set_uint128(&mut self, offset: usize, value: u128, word_size: usize) {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::write_uint128(&mut self.as_mut()[offset..], value, word_size);
        } else {
            LE::write_uint128(&mut self.as_mut()[offset..], value, word_size);
        }
    }
    /// Sets [`i8`] in slice of [`InSitu::swap_size()`] at big-endian `offset` endian-independently.
    #[allow(clippy::cast_sign_loss)]
    fn set_i8(&mut self, offset: usize, value: i8) {
        let at = self.at(offset, I8);
        self.as_mut()[at] = value as u8;
    }
    /// Sets [`i16`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_i16(&mut self, offset: usize, value: i16) {
        let offset = self.at(offset, I16);
        if self.is_be() {
            BE::write_i16(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_i16(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets `i24` as [`i32`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_i24(&mut self, offset: usize, value: i32) {
        let offset = self.at(offset, I24);
        if self.is_be() {
            BE::write_i24(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_i24(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`i32`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_i32(&mut self, offset: usize, value: i32) {
        let offset = self.at(offset, I32);
        if self.is_be() {
            BE::write_i32(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_i32(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`i64`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_i64(&mut self, offset: usize, value: i64) {
        let offset = self.at(offset, I64);
        if self.is_be() {
            BE::write_i64(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_i64(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`i128`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_i128(&mut self, offset: usize, value: i128) {
        let offset = self.at(offset, I128);
        if self.is_be() {
            BE::write_i128(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_i128(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets signed integer of `word_size <= 8` in slice of [`InSitu::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn set_int(&mut self, offset: usize, value: i64, word_size: usize) {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::write_int(&mut self.as_mut()[offset..], value, word_size);
        } else {
            LE::write_int(&mut self.as_mut()[offset..], value, word_size);
        }
    }
    /// Sets signed integer of `word_size <= 16` in slice of [`InSitu::swap_size()`] at big-endian
    /// `offset` endian-independently.
    fn set_int128(&mut self, offset: usize, value: i128, word_size: usize) {
        let offset = self.at(offset, word_size);
        if self.is_be() {
            BE::write_int128(&mut self.as_mut()[offset..], value, word_size);
        } else {
            LE::write_int128(&mut self.as_mut()[offset..], value, word_size);
        }
    }
    /// Sets [`f32`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_f32(&mut self, offset: usize, value: f32) {
        let offset = self.at(offset, F32);
        if self.is_be() {
            BE::write_f32(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_f32(&mut self.as_mut()[offset..], value);
        }
    }
    /// Sets [`f64`] in slice of [`InSitu::swap_size()`] at big-endian `offset`
    /// endian-independently.
    fn set_f64(&mut self, offset: usize, value: f64) {
        let offset = self.at(offset, F64);
        if self.is_be() {
            BE::write_f64(&mut self.as_mut()[offset..], value);
        } else {
            LE::write_f64(&mut self.as_mut()[offset..], value);
        }
    }
}

// /// Auto-implement <code>[InSituMut]\<S\> for [InSitu]\<S\> + [AsMut]\<\[[u8]\]\></code>
// /// implementors.
// impl<T: InSitu<S> + AsMut<[u8]>, S> InSituMut<S> for T {}

/// Abstracts immutable as well as mutable generic bytes view types like <code>&\[[u8]\]</code> and
/// <code>&mut \[[u8]\]</code> as immutable views.
///
/// With the `bytes` feature, abstracts `Bytes` and `BytesMut` as well.
///
/// Requires some standard nice-to-have but easy-to-derive traits, so a wrapper can simply derive
/// them. Requires methods to be implemented that split views into subviews.
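///
/// # Examples
///
/// A short sketch using the plain slice implementor (assuming the crate is published as
/// `in_situ`); the calls are fully qualified only to make clear which trait provides them:
///
/// ```
/// use in_situ::Raw;
///
/// let mut raw: &[u8] = &[1, 2, 3, 4];
/// // `split_off` keeps the front in place and returns the tail ...
/// let tail = Raw::split_off(&mut raw, 2);
/// assert_eq!(raw, &[1, 2][..]);
/// assert_eq!(tail, &[3, 4][..]);
/// // ... while `split_to` keeps the tail in place and returns the front.
/// let head = Raw::split_to(&mut raw, 1);
/// assert_eq!(head, &[1][..]);
/// assert_eq!(raw, &[2][..]);
/// ```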
pub trait Raw: AsRef<[u8]> + Default + PartialEq + Eq + PartialOrd + Ord + Debug + Hash {
    /// Splits the bytes into two at the given index.
    ///
    /// Afterwards, `self` contains elements `[0, at)`, and the returned [`Self`] contains elements
    /// `[at, len)`.
    #[must_use]
    fn split_off(&mut self, at: usize) -> Self;
    /// Splits the bytes into two at the given index.
    ///
    /// Afterwards, `self` contains elements `[at, len)`, and the returned [`Self`] contains
    /// elements `[0, at)`.
    #[must_use]
    fn split_to(&mut self, at: usize) -> Self;
}

/// Abstracts mutable generic bytes view types like <code>&mut \[[u8]\]</code> as mutable views.
///
/// With the `bytes` feature, abstracts `BytesMut` as well.
///
/// This trait is auto-implemented for <code>[Raw] + [AsMut]\<\[[u8]\]\></code> implementors
/// extending the immutable views with mutable ones.
pub trait RawMut: Raw + AsMut<[u8]> {}

// Auto-implement [`RawMut`] for <code>[Raw] + [AsMut]\<\[[u8]\]\></code> implementors.
impl<T: Raw + AsMut<[u8]>> RawMut for T {}

impl Raw for &[u8] {
    fn split_off(&mut self, at: usize) -> Self {
        let (l, r) = self.split_at(at);
        *self = l;
        r
    }
    fn split_to(&mut self, at: usize) -> Self {
        let (l, r) = self.split_at(at);
        *self = r;
        l
    }
}

impl Raw for &mut [u8] {
    fn split_off(&mut self, at: usize) -> Self {
        // Move the slice out of `self` (leaving the default empty slice behind) so both halves
        // keep the original lifetime instead of a reborrow through `&mut &mut [u8]`.
        let slice = mem::take(self);
        let (l, r) = slice.split_at_mut(at);
        *self = l;
        r
    }
    fn split_to(&mut self, at: usize) -> Self {
        let slice = mem::take(self);
        let (l, r) = slice.split_at_mut(at);
        *self = r;
        l
    }
}

#[cfg(feature = "bytes")]
impl Raw for bytes::Bytes {
    fn split_off(&mut self, at: usize) -> Self {
        // Delegates to the inherent `Bytes::split_off()`; method resolution prefers it.
        self.split_off(at)
    }
    fn split_to(&mut self, at: usize) -> Self {
        self.split_to(at)
    }
}

#[cfg(feature = "bytes")]
impl Raw for bytes::BytesMut {
    fn split_off(&mut self, at: usize) -> Self {
        self.split_off(at)
    }
    fn split_to(&mut self, at: usize) -> Self {
        self.split_to(at)
    }
}

/// Helper type describing the underlying byte order.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Order {
    /// Big-endian byte order.
    BE,
    /// Little-endian byte order.
    LE,
}

/// Helper type specifying whether to take the bytes of the header only or the whole packet.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Take {
    /// Take bytes of header only.
    Header,
    /// Take bytes of whole packet.
    Packet,
}