// lmdb_zero/unaligned.rs

1// Copyright 2016 FullContact, Inc
2//
3// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
4// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
5// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
6// option. This file may not be copied, modified, or distributed
7// except according to those terms.
8
9use std::cmp;
10use std::fmt;
11use std::hash::{Hash, Hasher};
12use std::mem;
13use std::ops;
14
15use traits::*;
16
17/// Wrapper for arbitrary `Copy` types which lifts their alignment
18/// restrictions.
19///
20/// This allows using values which have non-byte alignment but are otherwise
21/// LMDB-safe (as defined by `LmdbRaw`) to be used with it. It obviously does
22/// not make `T` itself packed, so the same discussion with respect to padding
23/// in the `LmdbRaw` documentation applies here as well.
24///
25/// There is no way to get a reference to the contained value, as Rust
26/// currently has no way to express that the reference may be misaligned. (See
27/// also [https://github.com/rust-lang/rust/issues/27060](https://github.com/rust-lang/rust/issues/27060).)
28///
29/// ### Example
30///
31/// ```
32/// use lmdb_zero as lmdb;
33/// use lmdb_zero::Unaligned as U;
34///
35/// fn get_a_u64(env: &lmdb::Environment, db: &lmdb::Database,
36///              key: &str) -> u64 {
37///   let tx = lmdb::ReadTransaction::new(env).unwrap();
38///   let access = tx.access();
39///   access.get::<str, U<u64>>(db, key).unwrap().get()
40/// }
41/// ```
// `packed` forces the struct's alignment down to 1, removing whatever
// alignment requirement `T` itself carries — which is the entire point of
// this wrapper.
#[repr(packed)]
pub struct Unaligned<T : LmdbRawIfUnaligned>(T);
44
45impl<T : LmdbRawIfUnaligned> Clone for Unaligned<T> {
46    fn clone(&self) -> Self {
47        Unaligned(self.0)
48    }
49}
50
51impl<T : LmdbRawIfUnaligned> Copy for Unaligned<T> { }
52
// SAFETY(review): `Unaligned<T>` is a `#[repr(packed)]` newtype over `T`,
// so it occupies `T`'s bytes with no alignment requirement; presumably
// `LmdbRawIfUnaligned` is exactly the contract "LMDB-safe except for
// alignment" — confirm against the trait's documentation in `traits`.
unsafe impl<T : LmdbRawIfUnaligned> LmdbRaw for Unaligned<T> {
    /// Reports this type as `Unaligned<...>` around `T`'s reported type,
    /// for use in diagnostics.
    fn reported_type() -> String {
        format!("Unaligned<{}>", T::reported_type())
    }
}
58
// Key-ordering properties are delegated verbatim to `T`; wrapping in
// `Unaligned` changes alignment only, not the byte representation.
unsafe impl<T : LmdbRawIfUnaligned + LmdbOrdKeyIfUnaligned> LmdbOrdKey
for Unaligned<T> {
    fn ordered_by_bytes() -> bool { T::ordered_by_bytes() }
    fn ordered_as_integer() -> bool { T::ordered_as_integer() }
}
64
65impl<T : LmdbRawIfUnaligned> Unaligned<T> {
66    /// Wraps `t` in an `Unaligned` marker.
67    pub fn new(t: T) -> Self {
68        Unaligned(t)
69    }
70
71    /// Returns `t` as if it were wrapped by `Unaligned`.
72    ///
73    /// This is safe because any `&T` _is_ a valid `&Unaligned<T>`.
74    pub fn of_ref(t: &T) -> &Self {
75        unsafe { mem::transmute(t) }
76    }
77
78    /// Returns `t` as if it were wrapped by `Unaligned`.
79    ///
80    /// This is safe because any `&T` _is_ a valid `&Unaligned<T>`.
81    pub fn of_mut(t: &mut T) -> &mut Self {
82        unsafe { mem::transmute(t) }
83    }
84
85    /// Extracts the contained value.
86    ///
87    /// This is safe as the compiler has visibility into the fact that the
88    /// contained value is misaligned and can copy appropriately.
89    pub fn get(&self) -> T { self.0 }
90
91    /// Replaces the contained value.
92    pub fn set(&mut self, t: T) { self.0 = t; }
93}
94
/// Synonym for `Unaligned::of_ref()`.
///
/// Borrows `t` as an `&Unaligned<T>` without copying the value.
pub fn unaligned<T : LmdbRawIfUnaligned>(t: &T) -> &Unaligned<T> {
    Unaligned::of_ref(t)
}
99
100// Since a future rust version may bar taking a reference to a member of a
101// packed structure (and it's not entirely safe right now), manually implement
102// everything to copy to a local variable and then delegate.
103
macro_rules! deleg_fmt {
    // Implements the given `std::fmt` trait for `Unaligned<T>` by first
    // copying the wrapped value into an (aligned) local and then
    // delegating, so no reference into the packed field is ever taken.
    ($tr:ident) => {
        impl<T : LmdbRawIfUnaligned> fmt::$tr for Unaligned<T> where T : fmt::$tr {
            fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
                let inner = self.0;
                inner.fmt(fmt)
            }
        }
    }
}

// Delegate every formatting trait that `T` might implement.
deleg_fmt!(Binary);
deleg_fmt!(Debug);
deleg_fmt!(Display);
deleg_fmt!(LowerExp);
deleg_fmt!(LowerHex);
deleg_fmt!(Octal);
deleg_fmt!(Pointer);
deleg_fmt!(UpperExp);
deleg_fmt!(UpperHex);
124
125impl<T : LmdbRawIfUnaligned + cmp::PartialEq<T>>
126cmp::PartialEq<Unaligned<T>> for Unaligned<T> {
127    fn eq(&self, other: &Self) -> bool {
128        let (lhs, rhs) = (self.0, other.0);
129        lhs.eq(&rhs)
130    }
131}
132impl<T : LmdbRawIfUnaligned + cmp::Eq> cmp::Eq for Unaligned<T> { }
133impl<T : LmdbRawIfUnaligned + cmp::PartialOrd<T>>
134cmp::PartialOrd<Unaligned<T>> for Unaligned<T> {
135    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
136        let (lhs, rhs) = (self.0, other.0);
137        lhs.partial_cmp(&rhs)
138    }
139    fn lt(&self, other: &Self) -> bool {
140        let (lhs, rhs) = (self.0, other.0);
141        lhs.lt(&rhs)
142    }
143    fn le(&self, other: &Self) -> bool {
144        let (lhs, rhs) = (self.0, other.0);
145        lhs.le(&rhs)
146    }
147    fn gt(&self, other: &Self) -> bool {
148        let (lhs, rhs) = (self.0, other.0);
149        lhs.gt(&rhs)
150    }
151    fn ge(&self, other: &Self) -> bool {
152        let (lhs, rhs) = (self.0, other.0);
153        lhs.ge(&rhs)
154    }
155}
156impl<T : LmdbRawIfUnaligned + cmp::Ord> cmp::Ord for Unaligned<T> {
157    fn cmp(&self, other: &Self) -> cmp::Ordering {
158        let (lhs, rhs) = (self.0, other.0);
159        lhs.cmp(&rhs)
160    }
161}
162
163impl<T : LmdbRawIfUnaligned + Hash> Hash for Unaligned<T> {
164    fn hash<H : Hasher>(&self, state: &mut H) {
165        let v = self.0;
166        v.hash(state)
167    }
168}
169
macro_rules! binop {
    // Implements the value-producing binary operator trait `ops::$tr`
    // (method `$meth`) for `Unaligned<T>`: both operands are copied into
    // aligned locals, the operation runs on `T`, and the result is
    // re-wrapped. The output type must itself be usable unaligned.
    ($tr:ident, $meth:ident) => {
        impl<T : LmdbRawIfUnaligned + ops::$tr<T>>
        ops::$tr<Unaligned<T>> for Unaligned<T>
        where T::Output : LmdbRawIfUnaligned {
            type Output = Unaligned<T::Output>;
            fn $meth(self, rhs: Self) -> Self::Output {
                let (lhs, rhs) = (self.0, rhs.0);
                Unaligned(lhs.$meth(rhs))
            }
        }
    }
}
183
macro_rules! binopeq {
    // Implements the compound-assignment operator trait `ops::$tr`
    // (method `$meth`) for `Unaligned<T>`. The update is performed on an
    // aligned local copy and then written back, since `$meth` takes
    // `&mut self` and the packed field cannot be borrowed directly.
    ($tr:ident, $meth:ident) => {
        impl<T : LmdbRawIfUnaligned + ops::$tr<T>>
        ops::$tr<Unaligned<T>> for Unaligned<T> {
            fn $meth(&mut self, rhs: Self) {
                let (mut lhs, rhs) = (self.0, rhs.0);
                lhs.$meth(rhs);
                self.0 = lhs;
            }
        }
    }
}
196
// Value-producing arithmetic, bitwise, and shift operators.
binop!(Add, add);
binop!(BitAnd, bitand);
binop!(BitOr, bitor);
binop!(BitXor, bitxor);
binop!(Div, div);
binop!(Mul, mul);
binop!(Rem, rem);
binop!(Shl, shl);
binop!(Shr, shr);
binop!(Sub, sub);

// The corresponding in-place compound-assignment operators.
binopeq!(AddAssign, add_assign);
binopeq!(BitAndAssign, bitand_assign);
binopeq!(BitOrAssign, bitor_assign);
binopeq!(BitXorAssign, bitxor_assign);
binopeq!(DivAssign, div_assign);
binopeq!(MulAssign, mul_assign);
binopeq!(RemAssign, rem_assign);
binopeq!(ShlAssign, shl_assign);
binopeq!(ShrAssign, shr_assign);