oxidd_core/util/
mod.rs

1//! Various utilities
2
3use std::collections::BTreeSet;
4use std::collections::HashMap;
5use std::collections::HashSet;
6use std::hash::BuildHasher;
7use std::iter::FusedIterator;
8use std::marker::PhantomData;
9use std::mem::ManuallyDrop;
10use std::ops::Deref;
11use std::ops::DerefMut;
12
13use crate::Edge;
14use crate::LevelNo;
15use crate::Manager;
16use crate::NodeID;
17
18pub mod edge_hash_map;
19pub use edge_hash_map::EdgeHashMap;
20pub mod num;
21mod substitution;
22pub use substitution::*;
23
24pub use nanorand::WyRand as Rng;
25
/// Borrowed version of some handle
///
/// A handle is typically just a pointer, but we cannot copy it as we need to
/// update the reference counters accordingly. However, if we want to have
/// multiple instances of the same handle without changing the reference
/// counters, we can use shared references. This works as long as one does not
/// attempt to change the handle's tag. In this case, we need ownership of the
/// handle with the different tag, but need to restrict its lifetime to the one
/// of the original handle. Furthermore, we *must not* drop the new handle. This
/// is exactly what this data structure provides.
///
/// `Borrowed<'a, H>` always has the same representation as `H`.
#[repr(transparent)]
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct Borrowed<'a, H>(ManuallyDrop<H>, PhantomData<&'a H>);
41
42impl<'a, H> Borrowed<'a, H> {
43    /// Create a new borrowed handle
44    ///
45    /// While the type of `handle` suggests that the handle is owned, it should
46    /// be just the owned representation of a borrowed handle, e.g. no reference
47    /// counters should be increased when creating `handle`.
48    #[must_use]
49    #[inline]
50    pub fn new(handle: H) -> Self {
51        Self(ManuallyDrop::new(handle), PhantomData)
52    }
53
54    /// Convert a borrowed handle into the underlying [`ManuallyDrop`] handle
55    ///
56    /// # Safety
57    ///
58    /// The caller must ensure that the resources referenced by the handle
59    /// remain valid during its usage. Furthermore the returned handle must not
60    /// be dropped.
61    #[inline]
62    pub unsafe fn into_inner(this: Self) -> ManuallyDrop<H> {
63        this.0
64    }
65}
66
67impl<'a, H> Deref for Borrowed<'a, H> {
68    type Target = H;
69
70    #[inline]
71    fn deref(&self) -> &H {
72        self.0.deref()
73    }
74}
75
impl<'a, E: Edge> Borrowed<'a, E> {
    /// Change the tag of a borrowed [`Edge`]
    ///
    /// This is equivalent to [`Edge::with_tag()`], but can be used in some
    /// situations where [`Edge::with_tag()`] can't due to lifetime
    /// restrictions.
    #[inline]
    pub fn edge_with_tag(self, tag: E::Tag) -> Self {
        // Panic safety: we must not drop `edge` (the unwrapped, owned
        // representation). This would happen if `Edge::with_tag_owned`
        // panicked (which it definitely shouldn't, but …). Dropping the edge
        // itself shouldn't immediately lead to undefined behavior, but we need
        // to make sure that the owned edge is not dropped as well, so we abort
        // the whole process in this case.
        let guard = AbortOnDrop("`Edge::with_tag_owned` panicked.");
        let edge = ManuallyDrop::into_inner(self.0);
        let res = Self(ManuallyDrop::new(edge.with_tag_owned(tag)), PhantomData);
        guard.defuse();
        res
    }
}
96
/// Drop functionality for containers of [`Edge`]s
///
/// An edge on its own cannot be dropped: It may well be just an integer index
/// for an array. In this case we are lacking the base pointer, or more
/// abstractly the [`Manager`]. Most edge containers cannot store a manager
/// reference, be it for memory consumption or lifetime restrictions. This trait
/// provides methods to drop such a container with an externally supplied
/// function to drop edges.
pub trait DropWith<E: Edge>: Sized {
    /// Drop `self`
    ///
    /// Among dropping other parts, this calls `drop_edge` for all children.
    ///
    /// Having [`Self::drop_with_manager()`] only is not enough: To drop a
    /// [`Function`][crate::function::Function], we should not require a manager
    /// lock, otherwise we might end up in a deadlock (if we are in a
    /// [`.with_manager_exclusive()`][crate::function::Function::with_manager_exclusive]
    /// block). So we cannot provide a `&Manager` reference. Furthermore, when
    /// all [`Function`][crate::function::Function]s and
    /// [`ManagerRef`][crate::ManagerRef]s referencing a manager are gone and
    /// the manager needs to drop e.g. the apply cache, it may also provide a
    /// function that only forgets edges rather than actually dropping them,
    /// saving the (in this case) unnecessary work of changing reference
    /// counters etc.
    fn drop_with(self, drop_edge: impl Fn(E));

    /// Drop `self`
    ///
    /// This is equivalent to `self.drop_with(|e| manager.drop_edge(e))`.
    ///
    /// Among dropping other parts, this calls
    /// [`manager.drop_edge()`][Manager::drop_edge] for all children.
    #[inline]
    fn drop_with_manager<M: Manager<Edge = E>>(self, manager: &M) {
        self.drop_with(|e| manager.drop_edge(e));
    }
}
134
/// A container that may hold "weak" references to nodes and needs to be
/// informed about operations potentially removing nodes
///
/// The main motivation behind this trait is the following observation: When
/// using reference counting to implement garbage collection of dead nodes,
/// cloning and dropping edges when inserting entries into the apply cache may
/// cause many CPU cache misses. To circumvent this performance issue, the apply
/// cache may store [`Borrowed<M::Edge>`]s (e.g., using the unsafe
/// [`Borrowed::into_inner()`]). Now the apply cache implementation has to
/// guarantee that every edge returned by the [`get()`][crate::ApplyCache::get]
/// method still points to a valid node. To that end, the cache may, e.g., clear
/// itself when [`Self::pre_gc()`] is called and reject any insertion of new
/// entries until [`Self::post_gc()`].
pub trait GCContainer<M: Manager> {
    /// Prepare for garbage collection
    ///
    /// The implementing container data structure may rely on that this method
    /// is called before the `Manager` performs garbage collection or any
    /// other operation that possibly removes nodes from the manager. (If
    /// this is required for SAFETY, however, creation of a `GCContainer`
    /// instance must be marked unsafe).
    ///
    /// This method may lock (parts of) `self`. Unlocking is then done in
    /// [`Self::post_gc()`].
    fn pre_gc(&self, manager: &M);

    /// Post-process a garbage collection
    ///
    /// # Safety
    ///
    /// Each call to this method must be paired with a distinct preceding
    /// [`Self::pre_gc()`] call. All operations potentially removing nodes must
    /// happen between [`Self::pre_gc()`] and the call to this method.
    unsafe fn post_gc(&self, manager: &M);
}
170
/// Drop guard for edges to ensure that they are not leaked
///
/// On drop, the contained edge is handed back to the manager.
pub struct EdgeDropGuard<'a, M: Manager> {
    manager: &'a M,
    // `ManuallyDrop` so that `Drop::drop` and `into_edge` can move the edge out
    edge: ManuallyDrop<M::Edge>,
}
176
impl<'a, M: Manager> EdgeDropGuard<'a, M> {
    /// Create a new drop guard that releases `edge` via `manager` on drop
    #[inline]
    pub fn new(manager: &'a M, edge: M::Edge) -> Self {
        Self {
            manager,
            edge: ManuallyDrop::new(edge),
        }
    }

    /// Convert `self` into the contained edge, without dropping it via the
    /// manager
    #[inline]
    pub fn into_edge(mut self) -> M::Edge {
        // SAFETY: `self.edge` is never used again: we forget `self` below, so
        // `Drop::drop` (the only other reader) cannot observe it
        let edge = unsafe { ManuallyDrop::take(&mut self.edge) };
        std::mem::forget(self);
        edge
    }
}
196
impl<'a, M: Manager> Drop for EdgeDropGuard<'a, M> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `self.edge` is never used again (`self` is being dropped).
        self.manager
            .drop_edge(unsafe { ManuallyDrop::take(&mut self.edge) });
    }
}
205
206impl<'a, M: Manager> Deref for EdgeDropGuard<'a, M> {
207    type Target = M::Edge;
208
209    #[inline]
210    fn deref(&self) -> &Self::Target {
211        &self.edge
212    }
213}
214impl<'a, M: Manager> DerefMut for EdgeDropGuard<'a, M> {
215    #[inline]
216    fn deref_mut(&mut self) -> &mut Self::Target {
217        &mut self.edge
218    }
219}
220
/// Drop guard for vectors of edges to ensure that they are not leaked
///
/// On drop, every contained edge is handed back to the manager.
pub struct EdgeVecDropGuard<'a, M: Manager> {
    manager: &'a M,
    vec: Vec<M::Edge>,
}
226
impl<'a, M: Manager> EdgeVecDropGuard<'a, M> {
    /// Create a new drop guard that releases all edges in `vec` via `manager`
    /// on drop
    #[inline]
    pub fn new(manager: &'a M, vec: Vec<M::Edge>) -> Self {
        Self { manager, vec }
    }

    /// Convert `self` into the contained vector of edges, without dropping
    /// them via the manager
    #[inline]
    pub fn into_vec(mut self) -> Vec<M::Edge> {
        // Leaves an empty `Vec` behind, so `Drop::drop` has nothing to release
        std::mem::take(&mut self.vec)
    }
}
240
241impl<'a, M: Manager> Drop for EdgeVecDropGuard<'a, M> {
242    #[inline]
243    fn drop(&mut self) {
244        for e in std::mem::take(&mut self.vec) {
245            self.manager.drop_edge(e);
246        }
247    }
248}
249
250impl<'a, M: Manager> Deref for EdgeVecDropGuard<'a, M> {
251    type Target = Vec<M::Edge>;
252
253    #[inline]
254    fn deref(&self) -> &Self::Target {
255        &self.vec
256    }
257}
258impl<'a, M: Manager> DerefMut for EdgeVecDropGuard<'a, M> {
259    #[inline]
260    fn deref_mut(&mut self) -> &mut Self::Target {
261        &mut self.vec
262    }
263}
264
/// Drop guard for inner nodes to ensure that they are not leaked
///
/// On drop, the contained node is dropped via [`DropWith::drop_with()`],
/// releasing its children through the manager.
pub struct InnerNodeDropGuard<'a, M: Manager> {
    manager: &'a M,
    // `ManuallyDrop` so that `Drop::drop` and `into_node` can move the node out
    node: ManuallyDrop<M::InnerNode>,
}
270
impl<'a, M: Manager> InnerNodeDropGuard<'a, M> {
    /// Create a new drop guard that drops `node` via `manager` on drop
    #[inline]
    pub fn new(manager: &'a M, node: M::InnerNode) -> Self {
        Self {
            manager,
            node: ManuallyDrop::new(node),
        }
    }

    /// Convert `this` into the contained node, without dropping it
    #[inline]
    pub fn into_node(mut this: Self) -> M::InnerNode {
        // SAFETY: `this.node` is never used again: we forget `this` below, so
        // `Drop::drop` (the only other reader) cannot observe it
        let node = unsafe { ManuallyDrop::take(&mut this.node) };
        std::mem::forget(this);
        node
    }
}
290
impl<'a, M: Manager> Drop for InnerNodeDropGuard<'a, M> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `self.node` is never used again (`self` is being dropped).
        unsafe { ManuallyDrop::take(&mut self.node) }.drop_with(|e| self.manager.drop_edge(e));
    }
}
298
299impl<'a, M: Manager> Deref for InnerNodeDropGuard<'a, M> {
300    type Target = M::InnerNode;
301
302    #[inline]
303    fn deref(&self) -> &Self::Target {
304        &self.node
305    }
306}
307impl<'a, M: Manager> DerefMut for InnerNodeDropGuard<'a, M> {
308    #[inline]
309    fn deref_mut(&mut self) -> &mut Self::Target {
310        &mut self.node
311    }
312}
313
/// Iterator that yields borrowed edges ([`Borrowed<'a, E>`][Borrowed]) provided
/// that `I` is an iterator that yields `&'a E`.
pub struct BorrowedEdgeIter<'a, E, I>(I, PhantomData<Borrowed<'a, E>>);
317
318impl<'a, E: Edge, I: Iterator<Item = &'a E>> From<I> for BorrowedEdgeIter<'a, E, I> {
319    fn from(it: I) -> Self {
320        Self(it, PhantomData)
321    }
322}
323
324impl<'a, E: Edge, I: Iterator<Item = &'a E>> Iterator for BorrowedEdgeIter<'a, E, I> {
325    type Item = Borrowed<'a, E>;
326
327    #[inline]
328    fn next(&mut self) -> Option<Self::Item> {
329        match self.0.next() {
330            Some(edge) => Some(edge.borrowed()),
331            None => None,
332        }
333    }
334
335    #[inline]
336    fn size_hint(&self) -> (usize, Option<usize>) {
337        self.0.size_hint()
338    }
339}
340
// `BorrowedEdgeIter` is a 1:1 adapter, so it inherits fusedness and the exact
// length from the underlying iterator.
impl<'a, E: Edge, I: FusedIterator<Item = &'a E>> FusedIterator for BorrowedEdgeIter<'a, E, I> {}

impl<'a, E: Edge, I: ExactSizeIterator<Item = &'a E>> ExactSizeIterator
    for BorrowedEdgeIter<'a, E, I>
{
    #[inline]
    fn len(&self) -> usize {
        self.0.len()
    }
}
351
/// Set of nodes
///
/// Nodes are identified by the [`NodeID`] of the edge pointing to them, so all
/// operations take an edge rather than a node.
pub trait NodeSet<E>: Clone + Default + Eq {
    /// Get the number of nodes in the set
    #[must_use]
    fn len(&self) -> usize;

    /// Returns `true` iff there are no nodes in the set
    #[must_use]
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Add a node (the node to which `edge` points) to the set
    ///
    /// Returns `true` if the element was added (i.e. not previously present).
    fn insert(&mut self, edge: &E) -> bool;

    /// Return `true` if the set contains the node `edge` points to
    #[must_use]
    fn contains(&self, edge: &E) -> bool;

    /// Remove the node `edge` points to from the set
    ///
    /// Returns `true` if the node was present in the set.
    fn remove(&mut self, edge: &E) -> bool;
}
378impl<E: Edge, S: Clone + Default + BuildHasher> NodeSet<E> for HashSet<NodeID, S> {
379    #[inline]
380    fn len(&self) -> usize {
381        HashSet::len(self)
382    }
383    #[inline]
384    fn insert(&mut self, edge: &E) -> bool {
385        self.insert(edge.node_id())
386    }
387    #[inline]
388    fn contains(&self, edge: &E) -> bool {
389        self.contains(&edge.node_id())
390    }
391    #[inline]
392    fn remove(&mut self, edge: &E) -> bool {
393        self.remove(&edge.node_id())
394    }
395}
396impl<E: Edge> NodeSet<E> for BTreeSet<NodeID> {
397    #[inline]
398    fn len(&self) -> usize {
399        BTreeSet::len(self)
400    }
401    #[inline]
402    fn insert(&mut self, edge: &E) -> bool {
403        self.insert(edge.node_id())
404    }
405    #[inline]
406    fn contains(&self, edge: &E) -> bool {
407        self.contains(&edge.node_id())
408    }
409    #[inline]
410    fn remove(&mut self, edge: &E) -> bool {
411        self.remove(&edge.node_id())
412    }
413}
414
/// Optional Boolean with `repr(i8)`
///
/// The explicit discriminants allow cheap conversion to/from `i8` (with `None`
/// mapped to -1).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[repr(i8)]
pub enum OptBool {
    /// Don't care
    None = -1,
    /// Boolean `false`
    #[allow(missing_docs)]
    False = 0,
    /// Boolean `true`
    #[allow(missing_docs)]
    True = 1,
}
426
427impl From<bool> for OptBool {
428    fn from(value: bool) -> Self {
429        if value {
430            Self::True
431        } else {
432            Self::False
433        }
434    }
435}
436
/// Zero-sized struct that calls [`std::process::abort()`] if dropped
///
/// This is useful to make code exception safe. If there is a region that must
/// not panic for safety reasons, you can use this to prevent further unwinding,
/// at least.
///
/// Before aborting, the provided string is printed. If the guarded code in the
/// example panics, `FATAL: Foo panicked. Aborting.` will be printed to stderr.
///
/// ## Example
///
/// ```
/// # use oxidd_core::util::AbortOnDrop;
/// let panic_guard = AbortOnDrop("Foo panicked.");
/// // ... code that might panic ...
/// panic_guard.defuse();
/// ```
pub struct AbortOnDrop(pub &'static str);
455
impl AbortOnDrop {
    /// Consume `self` without aborting the process.
    ///
    /// Equivalent to `std::mem::forget(self)`: the [`Drop`] implementation
    /// (which would abort) never runs.
    #[inline(always)]
    pub fn defuse(self) {
        std::mem::forget(self);
    }
}
465
impl Drop for AbortOnDrop {
    /// Print the stored message to stderr, then abort; never returns
    fn drop(&mut self) {
        eprintln!("FATAL: {} Aborting.", self.0);
        std::process::abort();
    }
}
472
/// Out of memory error type
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct OutOfMemory;

/// Result type with [`OutOfMemory`] error
pub type AllocResult<T> = Result<T, OutOfMemory>;
479
/// Is the underlying type a floating point number?
pub trait IsFloatingPoint {
    /// `true` iff the underlying type is a floating point number
    const FLOATING_POINT: bool;

    /// One greater than the minimum possible normal power of 2 exponent, see
    /// [`f64::MIN_EXP`] for instance. `0` for integers
    const MIN_EXP: i32;
}

// dirty hack until we have specialization: integer types implement
// `ShlAssign<i32>` while the floating point types do not, so this blanket impl
// only covers the non-floating-point case
/// cbindgen:ignore
impl<T: std::ops::ShlAssign<i32>> IsFloatingPoint for T {
    const FLOATING_POINT: bool = false;
    const MIN_EXP: i32 = 0;
}
496
/// A number type suitable for counting satisfying assignments
pub trait SatCountNumber:
    Clone
    + From<u32>
    + for<'a> std::ops::AddAssign<&'a Self>
    + for<'a> std::ops::SubAssign<&'a Self>
    + std::ops::ShlAssign<u32>
    + std::ops::ShrAssign<u32>
    + IsFloatingPoint
{
}

// Blanket impl: `SatCountNumber` is merely an alias for its supertrait bounds,
// so every type satisfying them implements it automatically.
impl<
        T: Clone
            + From<u32>
            + for<'a> std::ops::AddAssign<&'a T>
            + for<'a> std::ops::SubAssign<&'a T>
            + std::ops::ShlAssign<u32>
            + std::ops::ShrAssign<u32>
            + IsFloatingPoint,
    > SatCountNumber for T
{
}
520
/// Cache for counting satisfying assignments
pub struct SatCountCache<N: SatCountNumber, S: BuildHasher> {
    /// Main map from [`NodeID`]s to their model count
    pub map: HashMap<NodeID, N, S>,

    /// Number of variables in the domain
    vars: LevelNo,

    /// Epoch to indicate if the cache is still valid.
    ///
    /// While reordering preserves semantics (and therefore also the count of
    /// satisfying assignments), nodes may be deleted and their [`NodeID`]s may
    /// get reused afterwards. The `map` should only be considered valid if
    /// `epoch` is [`Manager::reorder_count()`].
    epoch: u64,
}
537
538impl<N: SatCountNumber, S: BuildHasher + Default> Default for SatCountCache<N, S> {
539    fn default() -> Self {
540        Self {
541            map: HashMap::default(),
542            vars: 0,
543            epoch: 0,
544        }
545    }
546}
547
548impl<N: SatCountNumber, S: BuildHasher> SatCountCache<N, S> {
549    /// Create a new satisfiability counting cache
550    pub fn with_hasher(hash_builder: S) -> Self {
551        Self {
552            map: HashMap::with_hasher(hash_builder),
553            vars: 0,
554            epoch: 0,
555        }
556    }
557
558    /// Clear the cache if it has become invalid due to reordering or a change
559    /// in the number of variables
560    pub fn clear_if_invalid<M: Manager>(&mut self, manager: &M, vars: LevelNo) {
561        let epoch = manager.reorder_count();
562        if epoch != self.epoch || vars != self.vars {
563            self.epoch = epoch;
564            self.vars = vars;
565            self.map.clear();
566        }
567    }
568}