mail_internals/utils/mod.rs

//! Some more general utilities.
//!
//! In other words, things which (currently)
//! have no other place to be put.
use std::any::TypeId;
use std::cell::RefCell;
use std::mem;
use std::fmt::{self, Debug};


/// Helper for implementing `Debug` for an iterable thing where the thing itself is irrelevant.
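///
/// A minimal usage sketch; note that the wrapped iterator is consumed the
/// first time it is formatted (hence `one_use_inner`):
///
/// ```ignore
/// let opaque = DebugIterableOpaque::new([1u8, 2, 3].iter());
/// assert_eq!(format!("{:?}", opaque), "[1, 2, 3]");
/// ```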
pub struct DebugIterableOpaque<I> {
    one_use_inner: RefCell<I>
}

impl<I> DebugIterableOpaque<I> {
    pub fn new(one_use_inner: I) -> Self {
        let one_use_inner = RefCell::new(one_use_inner);
        DebugIterableOpaque { one_use_inner }
    }
}
impl<I> Debug for DebugIterableOpaque<I>
    where I: Iterator, I::Item: Debug
{
    fn fmt(&self, fter: &mut fmt::Formatter) -> fmt::Result {
        let mut borrow = self.one_use_inner.borrow_mut();
        fter.debug_list().entries(&mut *borrow).finish()
    }
}


//FIXME[rust/fat pointer cast]: make it ?Sized once it's supported by rust
///
/// Used to undo type erasure in a generic context, roughly semantically
/// equivalent to creating a `&Any` trait object from the input and then
/// using `downcast_ref::<EXP>()`, except that it does not require the
/// creation of a trait object as a step in between.
///
/// Note:
/// This function can be used for some form of specialization
/// (not just in a performance sense), but all "specialization paths"
/// have to be known when writing the unspecialized version, and it is
/// easy to make functions behave in an unexpected (but safe) way,
/// so use with care.
///
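/// A minimal sketch of such a specialization; the `describe` function is
/// purely illustrative, not part of this crate:
///
/// ```ignore
/// fn describe<T: Debug + 'static>(value: &T) -> String {
///     if let Some(string) = uneraser_ref::<T, String>(value) {
///         // this "specialization path" only runs when T == String
///         format!("a string of length {}", string.len())
///     } else {
///         format!("{:?}", value)
///     }
/// }
/// ```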
#[inline(always)]
pub fn uneraser_ref<GOT: 'static, EXP: 'static>(inp: &GOT) -> Option<&EXP> {
    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
        //SAFE: the GOT type is exactly the same as the EXP type,
        // the compiler just does not know this due to type erasure wrt.
        // generic types
        let res: &EXP = unsafe { mem::transmute::<&GOT, &EXP>(inp) };
        Some(res)
    } else {
        None
    }
}


//FIXME[rust/fat pointer cast]: make it ?Sized once it's supported by rust
#[doc(hidden)]
#[inline(always)]
pub fn uneraser_mut<GOT: 'static, EXP: 'static>(inp: &mut GOT) -> Option<&mut EXP> {
    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
        //SAFE: the GOT type is exactly the same as the EXP type,
        // the compiler just does not know this due to type erasure wrt.
        // generic types
        let res: &mut EXP = unsafe { mem::transmute::<&mut GOT, &mut EXP>(inp) };
        Some(res)
    } else {
        None
    }
}

//FIXME: only works if the rust compiler gets a bit more clever or a bit less (either is fine)
//#[inline(always)]
//pub fn uneraser<GOT: 'static, EXP: 'static>(inp: GOT) -> Result<EXP, GOT> {
//    if TypeId::of::<GOT>() == TypeId::of::<EXP>() {
//        //SAFE: the GOT type is exactly the same as the EXP type,
//        // the compiler just does not know this due to type erasure wrt.
//        // generic types
//        Ok(unsafe { mem::transmute::<GOT, EXP>(inp) })
//    } else {
//        Err(inp)
//    }
//}

//fn get_flat_byte_repr<T>(val: &T) -> Vec<u8> {
//    let count = mem::size_of::<T>();
//    let mut out = Vec::with_capacity(count);
//    let byte_ptr = val as *const T as *const u8;
//    for offset in 0..count {
//        out.push(unsafe {
//            *byte_ptr.offset(offset as isize)
//        })
//    }
//    out
//}



/// Returns true if this is a continuation byte, i.e. not the first byte of
/// a multi-byte utf-8 character.
///
/// This will return false:
/// - on all us-ascii chars (as u8)
/// - on the first byte of a multi-byte utf-8 char
///
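/// For example, `'ä'` encodes to the two bytes `0xC3, 0xA4` in utf-8:
///
/// ```ignore
/// let bytes = "ä".as_bytes();
/// assert!(!is_utf8_continuation_byte(bytes[0])); // leading byte 0xC3
/// assert!(is_utf8_continuation_byte(bytes[1]));  // continuation byte 0xA4
/// ```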
pub fn is_utf8_continuation_byte(b: u8) -> bool {
    // all continuation bytes (and only they) in utf-8 match the bit pattern 0b10xxxxxx
    (b & 0b11000000) == 0b10000000
}

/// Inserts the bytes of `source` into `target` at position `idx`; this is a
/// bulk version of `Vec::insert` and faster than inserting byte by byte.
///
/// # Panics
///
/// Panics if `idx > target.len()`.
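///
/// A short usage sketch:
///
/// ```ignore
/// let mut buf = vec![1u8, 2, 5, 6];
/// vec_insert_bytes(&mut buf, 2, &[3, 4]);
/// assert_eq!(&*buf, &[1u8, 2, 3, 4, 5, 6]);
/// ```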
pub fn vec_insert_bytes(target: &mut Vec<u8>, idx: usize, source: &[u8]) {
    use std::ptr::copy;

    if idx > target.len() {
        panic!("index out of bounds: the len is {} but the index is {}",
            target.len(), idx);
    }

    let old_len = target.len();
    let insertion_len = source.len();
    let source_ptr = source.as_ptr();
    let moved_data_len = old_len - idx;

    // Reserve *before* taking the insertion pointer: `reserve` may
    // reallocate, which would invalidate a pointer taken earlier.
    target.reserve(insertion_len);

    let insertion_point = unsafe {
        // SAFE: we panic if idx > target.len(), though idx == target.len() is fine
        target.as_mut_ptr().offset(idx as isize)
    };

    unsafe {
        // SAFE 1: we reserved insertion_len and insertion_point is at most
        //         old_len into the buffer, so the offset stays in capacity
        // SAFE 2: insertion_point + insertion_len + moved_data_len needs to be
        //         <= target + target.capacity(). By replacing variables:
        //         - insertion_point + insertion_len + moved_data_len <= target + capacity
        //         - target + idx + insertion_len + old_len - idx <= target + capacity
        //         - target + idx + insertion_len + old_len - idx <= target + old_len + insertion_len
        //         - idx + insertion_len + old_len - idx <= old_len + insertion_len
        //         - idx - idx <= 0
        //         - 0 <= 0  [Q.E.D.]
        copy(/*src*/insertion_point,
             /*dest*/insertion_point.offset(insertion_len as isize),
             /*count*/moved_data_len);

        // SAFE: insertion_point + insertion_len needs to be <= target + target.capacity(),
        //   which is guaranteed as we reserved insertion_len and insertion_point is
        //   at most old_len into the buffer.
        copy(source_ptr, insertion_point, insertion_len);

        // SAFE: we reserved insertion_len additional bytes
        target.set_len(old_len + insertion_len)
    }
}

#[cfg(test)]
mod tests {
    use super::vec_insert_bytes;

    #[test]
    fn inserting_slices_at_beginning() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        vec_insert_bytes(&mut base, 0, new);

        assert_eq!(&*base, &[10u8, 11, 0, 1, 2, 3]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_at_end() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        let end = base.len();
        vec_insert_bytes(&mut base, end, new);

        assert_eq!(&*base, &[0u8, 1, 2, 3, 10, 11]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_in_the_middle() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11];

        vec_insert_bytes(&mut base, 1, new);

        assert_eq!(&*base, &[0u8, 10, 11, 1, 2, 3]);
        assert!(base.capacity() >= 6);
    }

    #[test]
    fn inserting_slices_large_in_the_middle() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8, 11, 12, 13, 14, 15, 16];

        vec_insert_bytes(&mut base, 1, new);

        assert_eq!(&*base, &[0u8, 10, 11, 12, 13, 14, 15, 16, 1, 2, 3]);
        assert!(base.capacity() >= 11);
    }

    #[should_panic]
    #[test]
    fn insert_out_of_bound() {
        let mut base = vec![0u8, 1u8, 2u8, 3u8];
        let new = &[10u8];

        vec_insert_bytes(&mut base, 10, new);
    }
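
    // A sketch of the reallocation edge case: inserting more bytes than the
    // spare capacity forces `reserve` to reallocate, which must happen before
    // the insertion pointer is taken.
    #[test]
    fn inserting_with_a_forced_reallocation() {
        let mut base = Vec::with_capacity(4);
        base.extend_from_slice(&[0u8, 1, 2, 3]);
        let new = &[10u8, 11, 12, 13, 14, 15];

        vec_insert_bytes(&mut base, 2, new);

        assert_eq!(&*base, &[0u8, 1, 10, 11, 12, 13, 14, 15, 2, 3]);
    }

    // Sanity check for `uneraser_ref`: it uncovers the value only when the
    // erased and the expected type are the same.
    #[test]
    fn uneraser_ref_matches_only_the_same_type() {
        use super::uneraser_ref;

        let value: u32 = 12;
        assert_eq!(uneraser_ref::<u32, u32>(&value), Some(&12u32));
        assert_eq!(uneraser_ref::<u32, u64>(&value), None);
    }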
}