lazyext_slice/bytes_ext.rs

#[cfg(feature = "alloc")]
use alloc::borrow::Cow;
use core::mem;
use core::ptr::slice_from_raw_parts;
use core::slice::{from_raw_parts, from_raw_parts_mut};

macro_rules! cfg_bytes {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "bytes")]
            #[cfg_attr(docsrs, doc(cfg(feature = "bytes")))]
            $item
        )*
    }
}

#[cfg(feature = "alloc")]
macro_rules! to_x_vec_impl {
    ($this:ident, $typ:tt::$conv:tt) => {{
        const SIZE: usize = mem::size_of::<$typ>();
        let src = $this.as_bytes_ref();
        assert_eq!(
            src.len() % SIZE,
            0,
            "invalid length of u8 slice: {}",
            src.len()
        );
        let ptr = src.as_ptr();
        (0..src.len())
            .step_by(SIZE)
            .map(|v| unsafe { $typ::$conv(*(ptr.add(v) as *const _ as *const [_; SIZE])) })
            .collect::<Vec<_>>()
    }};
}

macro_rules! to_x_slice_impl_suite_in {
    ($this: ident, $builder:ident, $trait:tt::$fn:tt::$ptr:tt, $raw_ptr: ident, $ty: ty) => {{
        let src = $trait::$fn($this);
        assert_eq!(src.len() % mem::size_of::<$ty>(), 0, "invalid length of u8 slice: {}", src.len());
        let ptr = src.$ptr() as *const $ty;
        unsafe {
            $builder(ptr as *$raw_ptr $ty, src.len() / mem::size_of::<$ty>())
        }
    }};
}

macro_rules! to_x_slice_impl_suite {
    ($builder:ident, $trait:tt::$fn:tt::$ptr:tt, $raw_ptr: ident, $([$ty: ty, $ty_literal: literal]), +$(,)?) => {
        $(
        paste! {
            #[doc = concat!("Convert u8 slice to ", $ty_literal, " slice in native-endian (zero-copy)")]
            fn [<to_ $ty _slice>](&self) -> &[$ty] {
                to_x_slice_impl_suite_in!(self, $builder, $trait::$fn::$ptr, $raw_ptr, $ty)
            }
        }
        )*
    };
    (mut $builder:ident, $trait:tt::$fn:tt::$ptr:tt, $raw_ptr: ident, $([$ty: ty, $ty_literal: literal]), +$(,)?) => {
        $(
        paste! {
            #[doc = concat!("Convert mutable u8 slice to mutable ", $ty_literal, " slice in native-endian (zero-copy)")]
            fn [<to_ $ty _slice_mut>](&mut self) -> &mut [$ty] {
                to_x_slice_impl_suite_in!(self, $builder, $trait::$fn::$ptr, $raw_ptr, $ty)
            }
        }
        )*
    };
}

#[cfg(feature = "alloc")]
macro_rules! to_x_slice_lossy_impl {
    ($this:ident, $typ: ident) => {{
        const SIZE: usize = mem::size_of::<$typ>();
        let src = $this.as_bytes_ref();
        assert_eq!(
            src.len() % SIZE,
            0,
            "invalid length of u8 slice: {}",
            src.len()
        );
        let ptr = src.as_ptr() as *const $typ;
        let lossy = unsafe { &*slice_from_raw_parts(ptr, src.len() / SIZE) };
        Cow::Borrowed(lossy)
    }};
}

#[cfg(feature = "alloc")]
macro_rules! to_x_vec_impl_suite {
    ($([$ty:ty, $ty_literal: literal]), +$(,)?) => {
        $(
        paste! {
            #[doc = concat!("Copy u8 slice to ", $ty_literal, " vec in big-endian")]
            #[inline]
            fn [<to_be_ $ty _vec>](&self) -> Vec<$ty> {
                to_x_vec_impl!(self, $ty::from_be_bytes)
            }

            #[doc = concat!("Copy u8 slice to ", $ty_literal, " vec in little-endian")]
            #[inline]
            fn [<to_le_ $ty _vec>](&self) -> Vec<$ty> {
                to_x_vec_impl!(self, $ty::from_le_bytes)
            }

            #[doc = concat!("Copy u8 slice to ", $ty_literal, " vec in native-endian")]
            #[inline]
            fn [<to_ne_ $ty _vec>](&self) -> Vec<$ty> {
                to_x_vec_impl!(self, $ty::from_ne_bytes)
            }
        }
        )*
    };
}

#[cfg(feature = "alloc")]
macro_rules! to_x_slice_lossy_impl_suite {
    ($([$ty: ty, $ty_literal: literal]), +$(,)?) => {
        $(
        paste! {
            #[doc = concat!("Convert u8 slice to Cow<'_, [", $ty_literal, "]> in native-endian (zero-copy)")]
            fn [<to_ $ty _slice_lossy>](&self) -> Cow<'_, [$ty]> {
                to_x_slice_lossy_impl!(self, $ty)
            }
        }
        )*
    };
}

macro_rules! to_x_impl_suites {
    ($([$ty: ty, $ty_literal: literal]), +$(,)?) => {
        cfg_alloc!(to_x_vec_impl_suite!($([$ty, $ty_literal],)*););
        cfg_alloc!(to_x_slice_lossy_impl_suite!($([$ty, $ty_literal],)*););
        to_x_slice_impl_suite!(from_raw_parts, AsBytesRef::as_bytes_ref::as_ptr, const, $([$ty, $ty_literal],)*);
    };
}

// const MAX_BRUTE_FORCE: usize = 64;

/// Converts to `&'a [u8]`
pub trait AsBytesRef {
    /// Converts to a u8 slice
    fn as_bytes_ref(&self) -> &[u8];
}

/// Converts to `&'a mut [u8]`
pub trait AsBytesMutRef: AsBytesRef {
    /// Converts to a mutable u8 slice
    fn as_bytes_mut_ref(&mut self) -> &mut [u8];
}

/// Extensions for bytes
pub trait BytesExt: AsBytesRef {
    /// Returns whether the underlying bytes are equal
    #[inline]
    fn bytes_eq(&self, other: impl AsBytesRef) -> bool {
        self.as_bytes_ref().eq(other.as_bytes_ref())
    }

    // /// Returns all indexes of the instances of `sep` in `self`, or `None` if `sep` is not present.
    // fn grep_sub_indexes(&self, sep: impl AsBytesRef) -> Option<Vec<usize>> {
    //     let b = self.as_bytes_ref();
    //     let bl = b.len();
    //     let sep = sep.as_bytes_ref();
    //     let n = sep.len();
    //
    //     // when len is small, brute force is ok
    //     if bl <= MAX_BRUTE_FORCE {
    //         let mut vk = Vec::new();
    //         for i in 0..(bl - n + 1) {
    //             let mut ctr = 0;
    //             for j in 0..n {
    //                 if b[i + j] != sep[j] {
    //                     break;
    //                 }
    //                 ctr = j + 1;
    //             }
    //             if ctr == n {
    //                 vk.push(i);
    //             }
    //         }
    //         return Some(vk);
    //     }
    //
    //     // TODO: implement Boyer-Moore algorithm when we need to search in large byte slices
    //     None
    // }

    impl_psfix_suites!(AsBytesRef::as_bytes_ref, u8, "u8");

    to_x_impl_suites!(
        [u16, "u16"],
        [u32, "u32"],
        [usize, "usize"],
        [u64, "u64"],
        [u128, "u128"],
        [i8, "i8"],
        [i16, "i16"],
        [i32, "i32"],
        [i64, "i64"],
        [isize, "isize"],
        [i128, "i128"],
        [f32, "f32"],
        [f64, "f64"]
    );
}

/// Extensions for mutable bytes
pub trait BytesMutExt: AsBytesMutRef + BytesExt {
    to_x_slice_impl_suite!(
        mut from_raw_parts_mut,
        AsBytesMutRef::as_bytes_mut_ref::as_mut_ptr,
        mut,
        [u16, "u16"],
        [u32, "u32"],
        [usize, "usize"],
        [u64, "u64"],
        [u128, "u128"],
        [i8, "i8"],
        [i16, "i16"],
        [i32, "i32"],
        [i64, "i64"],
        [isize, "isize"],
        [i128, "i128"],
        [f32, "f32"],
        [f64, "f64"]
    );
}

impl<'a> AsBytesRef for &'a [u8] {
    fn as_bytes_ref(&self) -> &[u8] {
        self
    }
}

impl<'a> BytesExt for &'a [u8] {}

impl<'a> AsBytesRef for &'a mut [u8] {
    fn as_bytes_ref(&self) -> &[u8] {
        self
    }
}

impl<'a> AsBytesMutRef for &'a mut [u8] {
    fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
        self
    }
}

impl<'a> BytesExt for &'a mut [u8] {}

impl<'a> BytesMutExt for &'a mut [u8] {}

impl<const N: usize> AsBytesRef for [u8; N] {
    fn as_bytes_ref(&self) -> &[u8] {
        self
    }
}

impl<const N: usize> AsBytesMutRef for [u8; N] {
    fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
        self
    }
}

impl<const N: usize> BytesExt for [u8; N] {}

impl<const N: usize> BytesMutExt for [u8; N] {}

cfg_alloc! {
    impl AsBytesRef for Box<[u8]> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl AsBytesMutRef for Box<[u8]> {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut()
        }
    }

    impl BytesExt for Box<[u8]> {}

    impl BytesMutExt for Box<[u8]> {}

    impl<'a> AsBytesRef for &'a Box<[u8]> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl<'a> BytesExt for &'a Box<[u8]> {}

    impl<'a> AsBytesRef for &'a mut Box<[u8]> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl<'a> AsBytesMutRef for &'a mut Box<[u8]> {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut()
        }
    }

    impl<'a> BytesExt for &'a mut Box<[u8]> {}

    impl<'a> BytesMutExt for &'a mut Box<[u8]> {}

    impl<'a> AsBytesRef for &'a Vec<u8> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_slice()
        }
    }

    impl<'a> BytesExt for &'a Vec<u8> {}

    impl<'a> AsBytesRef for &'a mut Vec<u8> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_slice()
        }
    }

    impl<'a> AsBytesMutRef for &'a mut Vec<u8> {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut_slice()
        }
    }

    impl<'a> BytesExt for &'a mut Vec<u8> {}

    impl<'a> BytesMutExt for &'a mut Vec<u8> {}

    impl AsBytesRef for Vec<u8> {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_slice()
        }
    }

    impl AsBytesMutRef for Vec<u8> {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut_slice()
        }
    }

    impl BytesExt for Vec<u8> {}

    impl BytesMutExt for Vec<u8> {}
}

impl AsBytesRef for String {
    fn as_bytes_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl BytesExt for String {}

impl<'a> AsBytesRef for &'a String {
    fn as_bytes_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<'a> AsBytesRef for &'a mut String {
    fn as_bytes_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<'a> AsBytesRef for &'a str {
    fn as_bytes_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<'a> AsBytesRef for &'a mut str {
    fn as_bytes_ref(&self) -> &[u8] {
        self.as_bytes()
    }
}

impl<'a> BytesExt for &'a String {}

impl<'a> BytesExt for &'a mut String {}

impl<'a> BytesExt for &'a str {}

impl<'a> BytesExt for &'a mut str {}

cfg_bytes! {
    use bytes::{Bytes, BytesMut};

    impl AsBytesRef for Bytes {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl BytesExt for Bytes {}

    impl<'a> AsBytesRef for &'a Bytes {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl<'a> BytesExt for &'a Bytes {}

    impl AsBytesRef for BytesMut {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl AsBytesMutRef for BytesMut {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut()
        }
    }

    impl BytesExt for BytesMut {}

    impl BytesMutExt for BytesMut {}

    impl<'a> AsBytesRef for &'a BytesMut {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl<'a> BytesExt for &'a BytesMut {}

    impl<'a> AsBytesRef for &'a mut BytesMut {
        fn as_bytes_ref(&self) -> &[u8] {
            self.as_ref()
        }
    }

    impl<'a> AsBytesMutRef for &'a mut BytesMut {
        fn as_bytes_mut_ref(&mut self) -> &mut [u8] {
            self.as_mut()
        }
    }

    impl<'a> BytesExt for &'a mut BytesMut {}

    impl<'a> BytesMutExt for &'a mut BytesMut {}
}

#[cfg(test)]
mod tests {
    use super::BytesExt;
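
    // Illustrative sketch (not part of the public API): `Wrapper` is a
    // hypothetical newtype showing that implementing `AsBytesRef` plus the
    // empty `BytesExt` impl is enough to pick up the extension methods
    // defined above.
    struct Wrapper(Vec<u8>);

    impl super::AsBytesRef for Wrapper {
        fn as_bytes_ref(&self) -> &[u8] {
            self.0.as_slice()
        }
    }

    impl BytesExt for Wrapper {}

    #[test]
    fn test_custom_wrapper() {
        let w = Wrapper(vec![0u8, 1, 0, 2]);
        assert_eq!(w.to_be_u16_vec(), vec![1u16, 2u16]);
        assert!(w.bytes_eq([0u8, 1, 0, 2]));
    }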

    #[test]
    fn test_has_prefix() {
        let a = "Hello, LazyExt!";
        let b = "Hello";
        assert!(a.has_prefix(b));
    }

    #[test]
    fn test_has_suffix() {
        let a = "Hello, LazyExt!";
        let b = "LazyExt!";
        assert!(a.has_suffix(b));
    }

    #[test]
    fn test_longest_prefix() {
        let a = "Hello, LazyExt!";
        let b = "Hello, Rust!";
        assert_eq!(a.longest_prefix(b).len(), "Hello, ".len());
    }

    #[test]
    fn test_longest_suffix() {
        let a = "Hello, LazyExt!";
        let b = "Hi, LazyExt!";
        assert_eq!(a.longest_suffix(b).len(), ", LazyExt!".len());
        assert_eq!(b.longest_suffix(a).len(), ", LazyExt!".len());

        let a = "Hello, LazyExt!";
        let b = "LazyExt!";
        assert_eq!(a.longest_suffix(b).len(), "LazyExt!".len());
        assert_eq!(b.longest_suffix(a).len(), "LazyExt!".len());
    }
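
    // Small illustrative check of `bytes_eq` across different `AsBytesRef`
    // implementors defined in this file; not exhaustive.
    #[test]
    fn test_bytes_eq() {
        let a = "Hello, LazyExt!";
        let b = String::from("Hello, LazyExt!");
        assert!(a.bytes_eq(b));
        assert!(!a.bytes_eq("Hello"));
    }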

    #[test]
    fn test_to_u16() {
        let a = vec![0u8, 1, 0, 2];
        assert_eq!(a.to_be_u16_vec(), vec![1u16, 2u16]);

        let a = vec![1u8, 0, 2, 0];
        assert_eq!(a.to_le_u16_vec(), vec![1u16, 2u16]);
        assert_eq!(a.to_ne_u16_vec().as_slice(), a.to_u16_slice());
        eprintln!(
            "{:?} {:?} {:?}",
            a.to_be_u32_vec(),
            a.to_ne_u32_vec(),
            a.to_le_u32_vec()
        );
    }
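
    // Illustrative additions for the zero-copy views defined above:
    // `to_u16_slice_mut` writes through to the underlying bytes and
    // `to_u16_slice_lossy` borrows them, both in native endianness.
    #[test]
    fn test_to_u16_slice_mut() {
        use super::BytesMutExt;

        let mut a = vec![0u8; 4];
        {
            let s = a.to_u16_slice_mut();
            // Writing native-endian values stores these byte patterns back as-is.
            s[0] = u16::from_ne_bytes([0, 1]);
            s[1] = u16::from_ne_bytes([0, 2]);
        }
        assert_eq!(a, vec![0u8, 1, 0, 2]);
    }

    #[test]
    fn test_to_u16_slice_lossy() {
        let a = vec![1u8, 0, 2, 0];
        assert_eq!(a.to_u16_slice_lossy().as_ref(), a.to_u16_slice());
    }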
}