smt_scope/mem_dbg/utils.rs

#[cfg(feature = "mem_dbg")]
use mem_dbg::{MemDbg, MemSize};

use core::{
    hash::{Hash, Hasher},
    ops::{Deref, DerefMut},
};

#[cfg(feature = "analysis")]
use super::{FxHashMap, TiVec};

// BoxSlice

/// A boxed slice that avoids a heap allocation when it holds exactly one
/// element: `Small(T)` stores the element inline, while `Large` falls back
/// to a `Box<[T]>` (also used for the empty slice).
#[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, Clone)]
pub enum BoxSlice<T> {
    Large(Box<[T]>),
    Small(T),
}
impl<T> Default for BoxSlice<T> {
    fn default() -> Self {
        Self::Large(Default::default())
    }
}
impl<T> Deref for BoxSlice<T> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        match self {
            Self::Large(slice) => slice,
            Self::Small(slice) => core::slice::from_ref(slice),
        }
    }
}
impl<T> DerefMut for BoxSlice<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            Self::Large(slice) => slice,
            Self::Small(slice) => core::slice::from_mut(slice),
        }
    }
}
impl<T> FromIterator<T> for BoxSlice<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        assert!(Self::CHECK_T_SMALL);
        let mut iter = iter.into_iter();
        let Some(first) = iter.next() else {
            return Self::default();
        };
        match iter.next() {
            None => Self::Small(first),
            Some(second) => {
                let large = [first, second].into_iter().chain(iter).collect();
                Self::Large(large)
            }
        }
    }
}
impl<T> From<Vec<T>> for BoxSlice<T> {
    fn from(vec: Vec<T>) -> Self {
        assert!(Self::CHECK_T_SMALL);
        match vec.len() {
            1 => Self::Small(vec.into_iter().next().unwrap()),
            _ => Self::Large(vec.into_boxed_slice()),
        }
    }
}
impl<T, const N: usize> From<[T; N]> for BoxSlice<T> {
    fn from(array: [T; N]) -> Self {
        assert!(Self::CHECK_T_SMALL);
        array.into_iter().collect()
    }
}

impl<T> From<BoxSlice<T>> for Vec<T> {
    fn from(slice: BoxSlice<T>) -> Self {
        match slice {
            BoxSlice::Large(slice) => slice.into_vec(),
            BoxSlice::Small(slice) => vec![slice],
        }
    }
}

impl<T> BoxSlice<T> {
    /// Compile-time check that `T` fits in a `usize` and that the `Small`
    /// variant adds no size overhead over a plain `Box<[T]>`. An
    /// out-of-bounds index into the one-element array aborts const
    /// evaluation when a check fails.
    #[allow(clippy::no_effect)]
    const CHECK_T_SMALL: bool = {
        let is_t_small = core::mem::size_of::<T>() <= core::mem::size_of::<usize>();
        [(); 1][!is_t_small as usize]; // `size_of::<T>() > size_of::<usize>()`!
        let is_no_ovhd = core::mem::size_of::<BoxSlice<T>>() == core::mem::size_of::<Box<[T]>>();
        [(); 1][!is_no_ovhd as usize]; // `size_of::<BoxSlice<T>>() != size_of::<Box<[T]>>()`!
        true
    };
}
impl<T> core::borrow::Borrow<[T]> for BoxSlice<T> {
    fn borrow(&self) -> &[T] {
        self
    }
}
impl<T> core::borrow::BorrowMut<[T]> for BoxSlice<T> {
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T> PartialEq for BoxSlice<T>
where
    [T]: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        (**self).eq(&**other)
    }
}
impl<T> Eq for BoxSlice<T> where [T]: Eq {}

impl<T> PartialOrd for BoxSlice<T>
where
    [T]: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        (**self).partial_cmp(&**other)
    }
}

impl<T> Ord for BoxSlice<T>
where
    [T]: Ord,
{
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        (**self).cmp(&**other)
    }
}

impl<T> Hash for BoxSlice<T>
where
    [T]: Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

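// A minimal usage sketch, not part of the original file: it exercises the
// conversions above and the slice-like behaviour of `BoxSlice`. The module
// and test names are illustrative only.
#[cfg(test)]
mod box_slice_example {
    use super::*;

    #[test]
    fn small_and_large() {
        // A single element uses the inline `Small` variant (no allocation),
        // assuming `CHECK_T_SMALL` holds for `u32` on this target.
        let one: BoxSlice<u32> = [7].into();
        assert!(matches!(one, BoxSlice::Small(_)));
        assert_eq!(&*one, &[7]);

        // Two or more elements fall back to the `Large` boxed slice.
        let many: BoxSlice<u32> = vec![1, 2, 3].into();
        assert!(matches!(many, BoxSlice::Large(_)));
        assert_eq!(&*many, &[1, 2, 3]);

        // Round-trip back to `Vec`; comparisons go through the `[T]` impls.
        assert_eq!(Vec::from(many), vec![1, 2, 3]);
        assert_eq!(one, BoxSlice::from([7]));
    }
}
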
// Issue 3
// // SortedVec

// #[cfg_attr(feature = "mem_dbg", derive(MemSize, MemDbg))]
// #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
// #[derive(Debug, Clone)]
// pub struct SortedVec<T> {
//     pub raw: Vec<T>,
// }

// impl<T> Default for SortedVec<T> {
//     fn default() -> Self {
//         Self {
//             raw: Default::default(),
//         }
//     }
// }

// impl<T> SortedVec<T> {
//     pub fn new() -> Self {
//         Self::default()
//     }

//     pub fn push(&mut self, value: T) -> Result<(), std::collections::TryReserveError>
//     where
//         T: Ord,
//     {
//         self.push_by(value, |a, b| a.cmp(b))
//     }

//     pub fn push_by(
//         &mut self,
//         value: T,
//         mut f: impl FnMut(&T, &T) -> core::cmp::Ordering,
//     ) -> Result<(), std::collections::TryReserveError> {
//         if !self
//             .raw
//             .last()
//             .is_some_and(|last| f(&value, last) == core::cmp::Ordering::Less)
//         {
//             self.raw.try_reserve(1)?;
//             self.raw.push(value);
//             Ok(())
//         } else {
//             self.insert_by(value, f)
//         }
//     }

//     pub fn insert(&mut self, value: T) -> Result<(), std::collections::TryReserveError>
//     where
//         T: Ord,
//     {
//         self.insert_by(value, |a, b| a.cmp(b))
//     }

//     pub fn insert_by(
//         &mut self,
//         value: T,
//         mut f: impl FnMut(&T, &T) -> core::cmp::Ordering,
//     ) -> Result<(), std::collections::TryReserveError> {
//         self.raw.try_reserve(1)?;
//         let idx = self
//             .raw
//             .binary_search_by(|v| f(v, &value))
//             .unwrap_or_else(|idx| idx);
//         self.raw.insert(idx, value);
//         Ok(())
//     }
// }

// impl<T> core::ops::Index<usize> for SortedVec<T> {
//     type Output = T;
//     fn index(&self, index: usize) -> &Self::Output {
//         &self.raw[index]
//     }
// }

// impl<T> core::ops::IndexMut<usize> for SortedVec<T> {
//     fn index_mut(&mut self, index: usize) -> &mut Self::Output {
//         &mut self.raw[index]
//     }
// }

// InternMap

#[cfg(feature = "analysis")]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct ValueRef<V: ?Sized + 'static> {
    value: &'static V,
    _marker: core::marker::PhantomData<V>,
}

#[cfg(feature = "analysis")]
#[derive(Debug)]
pub struct InternMap<K: Copy + From<usize>, V: Eq + Hash + ?Sized + 'static> {
    #[cfg(feature = "analysis")]
    map: FxHashMap<ValueRef<V>, K>,
    interned: TiVec<K, Box<V>>,
}

#[cfg(feature = "analysis")]
impl<K: Copy + From<usize>, V: ?Sized + Eq + Hash + 'static> Default for InternMap<K, V> {
    fn default() -> Self {
        Self {
            map: Default::default(),
            interned: Default::default(),
        }
    }
}

#[cfg(feature = "analysis")]
impl<K: Copy + From<usize>, V: ?Sized + Eq + Hash + 'static> InternMap<K, V> {
    pub fn intern(&mut self, v: Box<V>) -> K {
        // SAFETY: `v` is stored in the `interned` vector, behind a `Box` so it
        // will not be moved or dropped until the whole `InternMap` is dropped.
        let value = unsafe { core::mem::transmute::<&V, &'static V>(v.as_ref()) };
        let value_ref = ValueRef {
            value,
            _marker: core::marker::PhantomData,
        };
        *self
            .map
            .entry(value_ref)
            .or_insert_with(|| self.interned.push_and_get_key(v))
    }

    pub fn finish(self) -> TiVec<K, Box<V>> {
        self.interned
    }
}

#[cfg(feature = "analysis")]
impl<K: Copy + From<usize>, V: ?Sized + Eq + Hash + 'static> Deref for InternMap<K, V> {
    type Target = TiVec<K, Box<V>>;
    fn deref(&self) -> &Self::Target {
        &self.interned
    }
}
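
// A minimal usage sketch, not part of the original file: interning equal
// values returns the same key, while distinct values get fresh keys. The
// `StrId` key type and the module name are illustrative only; any
// `Copy + From<usize>` key works.
#[cfg(all(test, feature = "analysis"))]
mod intern_map_example {
    use super::*;

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct StrId(usize);
    impl From<usize> for StrId {
        fn from(idx: usize) -> Self {
            Self(idx)
        }
    }

    #[test]
    fn interning_deduplicates() {
        let mut strings: InternMap<StrId, str> = InternMap::default();
        let a = strings.intern("hello".into());
        let b = strings.intern("world".into());
        let c = strings.intern("hello".into());
        assert_eq!(a, c);
        assert_ne!(a, b);
        // `finish` hands back only the deduplicated values, keyed by `StrId`.
        let interned = strings.finish();
        assert_eq!(interned.len(), 2);
    }
}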