mail_internals_ng/utils/
mod.rs

//! Some more general utilities.
//!
//! In other words, things which (currently)
//! have no other place to live.
use std::any::TypeId;
use std::cell::RefCell;
use std::mem;
use std::fmt::{self, Debug};


/// Helper for implementing `Debug` for an iterable thing where the thing itself is irrelevant.
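///
/// # Example
///
/// A minimal usage sketch; formatting consumes the wrapped
/// iterator, hence the "one use" naming:
///
/// ```ignore
/// let opaque = DebugIterableOpaque::new(0..4);
/// // Formats like a list of the iterator's items.
/// assert_eq!(format!("{:?}", opaque), "[0, 1, 2, 3]");
/// ```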
pub struct DebugIterableOpaque<I> {
    one_use_inner: RefCell<I>
}

impl<I> DebugIterableOpaque<I> {
    pub fn new(one_use_inner: I) -> Self {
        let one_use_inner = RefCell::new(one_use_inner);
        DebugIterableOpaque { one_use_inner }
    }
}
impl<I> Debug for DebugIterableOpaque<I>
    where I: Iterator, I::Item: Debug
{
    fn fmt(&self, fter: &mut fmt::Formatter) -> fmt::Result {
        let mut borrow = self.one_use_inner.borrow_mut();
        fter.debug_list().entries(&mut *borrow).finish()
    }
}


//FIXME[rust/fat pointer cast]: make it ?Sized once it's supported by rust
/// Undoes type erasure in a generic context.
///
/// This is roughly semantically equivalent to creating a `&Any`
/// trait object from the input and then using `downcast_ref::<EXP>()`,
/// except that it does not require the creation of a trait object
/// as an intermediate step.
///
/// Note:
/// This function can be used for some form of specialisation
/// (not just in a performance sense), but all "specialisation paths"
/// have to be known when writing the unspecialised version, and
/// it is easy to make functions behave in an unexpected (but safe)
/// way, so use with care.
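///
/// # Example
///
/// A minimal sketch of the specialisation pattern described above;
/// `render` and its `String` fast path are illustrative, not part of
/// this crate:
///
/// ```ignore
/// use std::fmt::Display;
///
/// fn render<T: Display + 'static>(value: &T) -> String {
///     // Fast path: if `T` is in fact `String`, clone it directly.
///     if let Some(s) = uneraser_ref::<T, String>(value) {
///         s.clone()
///     } else {
///         // Generic fallback for every other type.
///         value.to_string()
///     }
/// }
/// ```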
#[inline(always)]
pub fn uneraser_ref<GOT: 'static, EXP: 'static>(inp: &GOT) -> Option<&EXP> {
    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
        //SAFE: the GOT type is exactly the same as the EXP type,
        // the compiler just does not know this due to type erasure wrt.
        // generic types
        let res: &EXP = unsafe { mem::transmute::<&GOT, &EXP>(inp) };
        Some(res)
    } else {
        None
    }
}


//FIXME[rust/fat pointer cast]: make it ?Sized once it's supported by rust
#[doc(hidden)]
#[inline(always)]
pub fn uneraser_mut<GOT: 'static, EXP: 'static>(inp: &mut GOT) -> Option<&mut EXP> {
    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
        //SAFE: the GOT type is exactly the same as the EXP type,
        // the compiler just does not know this due to type erasure wrt.
        // generic types
        let res: &mut EXP = unsafe { mem::transmute::<&mut GOT, &mut EXP>(inp) };
        Some(res)
    } else {
        None
    }
}

//FIXME: only works if the rust compiler gets a bit more clever or a bit less (either is fine)
//#[inline(always)]
//pub fn uneraser<GOT: 'static, EXP: 'static>(inp: GOT) -> Result<EXP, GOT> {
//    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
//        //SAFE: the GOT type is exactly the same as the EXP type,
//        // the compiler just does not know this due to type erasure wrt.
//        // generic types
//        Ok(unsafe { mem::transmute::<GOT, EXP>(inp) })
//    } else {
//        Err(inp)
//    }
//}

//fn get_flat_byte_repr<T>(val: &T) -> Vec<u8> {
//    let count = mem::size_of::<T>();
//    let mut out = Vec::with_capacity(count);
//    let byte_ptr = val as *const T as *const u8;
//    for offset in 0..count {
//        out.push(unsafe {
//            *byte_ptr.offset(offset as isize)
//        })
//    }
//    out
//}


/// Returns true if this is a continuation byte (i.e. not the first byte)
/// of a multi-byte utf-8 character.
///
/// This will return false:
/// - on all us-ascii chars (as u8)
/// - on the first byte of a multi-byte utf-8 char
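///
/// # Example
///
/// A small sketch using the two bytes of the UTF-8 encoding
/// of `'é'` (`0xC3 0xA9`):
///
/// ```ignore
/// assert!(!is_utf8_continuation_byte(b'a')); // us-ascii, never a continuation
/// assert!(!is_utf8_continuation_byte(0xC3)); // first byte of 'é'
/// assert!(is_utf8_continuation_byte(0xA9));  // continuation byte of 'é'
/// ```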
pub fn is_utf8_continuation_byte(b: u8) -> bool {
    // All continuation bytes (and only they) in utf-8 start with 0b10xxxxxx.
    (b & 0b11000000) == 0b10000000
}

/// Inserts a byte slice into a byte vector at the given index
/// (faster than inserting the bytes one at a time).
///
/// # Panics
///
/// Panics if `idx > target.len()`.
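///
/// # Example
///
/// A short usage sketch:
///
/// ```ignore
/// let mut buf = vec![1u8, 2, 5, 6];
/// vec_insert_bytes(&mut buf, 2, &[3, 4]);
/// assert_eq!(buf, [1, 2, 3, 4, 5, 6]);
/// ```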
pub fn vec_insert_bytes(target: &mut Vec<u8>, idx: usize, source: &[u8]) {
    if idx > target.len() {
        panic!("index out of bounds: the len is {} but the index is {}",
            target.len(), idx);
    }

    // Splicing an empty range inserts `source` at `idx`, shifting the
    // tail of the vector right and reserving capacity as needed.
    target.splice(idx..idx, source.iter().copied());
}

#[cfg(test)]
mod tests {
    use super::vec_insert_bytes;

    #[test]
    fn inserting_slices_at_beginning() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        vec_insert_bytes(&mut base, 0, new);

        assert_eq!(&*base, &[10u8, 11, 0, 1, 2, 3]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_at_end() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        let end = base.len();
        vec_insert_bytes(&mut base, end, new);

        assert_eq!(&*base, &[0u8, 1, 2, 3, 10, 11]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_in_the_middle() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        vec_insert_bytes(&mut base, 1, new);

        assert_eq!(&*base, &[0u8, 10, 11, 1, 2, 3]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_large_in_the_middle() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11, 12, 13, 14, 15, 16];

        vec_insert_bytes(&mut base, 1, new);

        assert_eq!(&*base, &[0u8, 10, 11, 12, 13, 14, 15, 16, 1, 2, 3]);
        assert!(base.capacity() >= 11);
    }

    #[should_panic]
    #[test]
    fn insert_out_of_bound() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8];

        vec_insert_bytes(&mut base, 10, new);
    }
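
    // A small sanity check for `is_utf8_continuation_byte`, sketched from
    // its documentation ('é' is encoded as the bytes 0xC3 0xA9 in UTF-8).
    #[test]
    fn detects_utf8_continuation_bytes() {
        use super::is_utf8_continuation_byte;

        assert!(!is_utf8_continuation_byte(b'a'));
        assert!(!is_utf8_continuation_byte(0xC3));
        assert!(is_utf8_continuation_byte(0xA9));
    }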
}