// oxidd_manager_pointer/node/fixed_arity.rs

1use std::cell::UnsafeCell;
2use std::hash::Hash;
3use std::hash::Hasher;
4use std::mem::MaybeUninit;
5use std::sync::atomic;
6use std::sync::atomic::AtomicUsize;
7use std::sync::atomic::Ordering::{Relaxed, Release};
8
9use arcslab::AtomicRefCounted;
10
11use oxidd_core::util::Borrowed;
12use oxidd_core::util::BorrowedEdgeIter;
13use oxidd_core::util::DropWith;
14use oxidd_core::AtomicLevelNo;
15use oxidd_core::Edge;
16use oxidd_core::HasLevel;
17use oxidd_core::InnerNode;
18use oxidd_core::LevelNo;
19use oxidd_core::Tag;
20
21use crate::manager;
22use crate::manager::InnerNodeCons;
23
24use super::NodeBase;
25
26pub struct NodeWithLevel<'id, ET, const TAG_BITS: u32, const ARITY: usize> {
27    rc: AtomicUsize,
28    level: AtomicLevelNo,
29    children: UnsafeCell<[manager::Edge<'id, Self, ET, TAG_BITS>; ARITY]>,
30}
31
32impl<'id, ET: Tag, const TAG_BITS: u32, const ARITY: usize>
33    NodeWithLevel<'id, ET, TAG_BITS, ARITY>
34{
35    const UNINIT_EDGE: MaybeUninit<manager::Edge<'id, Self, ET, TAG_BITS>> = MaybeUninit::uninit();
36}
37
38unsafe impl<ET, const TAG_BITS: u32, const ARITY: usize> AtomicRefCounted
39    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
40{
41    #[inline(always)]
42    fn retain(&self) {
43        if self.rc.fetch_add(1, Relaxed) > (usize::MAX >> 1) {
44            std::process::abort();
45        }
46    }
47
48    #[inline(always)]
49    unsafe fn release(&self) -> usize {
50        self.rc.fetch_sub(1, Release)
51    }
52
53    #[inline(always)]
54    fn current(&self) -> usize {
55        self.rc.load(Relaxed)
56    }
57}
58
59impl<ET: Tag, const TAG_BITS: u32, const ARITY: usize> PartialEq
60    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
61{
62    #[inline(always)]
63    fn eq(&self, other: &Self) -> bool {
64        // SAFETY: we have shared access to the node
65        unsafe { *self.children.get() == *other.children.get() }
66    }
67}
68impl<ET: Tag, const TAG_BITS: u32, const ARITY: usize> Eq
69    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
70{
71}
72
73impl<ET: Tag, const TAG_BITS: u32, const ARITY: usize> Hash
74    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
75{
76    #[inline(always)]
77    fn hash<H: Hasher>(&self, state: &mut H) {
78        // SAFETY: we have shared access to the node
79        unsafe { &*self.children.get() }.hash(state);
80    }
81}
82
83// SAFETY: The reference counter is initialized to 2, `load_rc` uses the given
84// ordering.
85unsafe impl<ET: Tag, const TAG_BITS: u32, const ARITY: usize> NodeBase
86    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
87{
88    #[inline(always)]
89    fn needs_drop() -> bool {
90        false
91    }
92
93    #[inline(always)]
94    fn load_rc(&self, order: atomic::Ordering) -> usize {
95        self.rc.load(order)
96    }
97}
98
99impl<'id, ET: Tag, const TAG_BITS: u32, const ARITY: usize>
100    DropWith<manager::Edge<'id, Self, ET, TAG_BITS>> for NodeWithLevel<'id, ET, TAG_BITS, ARITY>
101{
102    #[inline]
103    fn drop_with(self, drop_edge: impl Fn(manager::Edge<'id, Self, ET, TAG_BITS>)) {
104        for c in self.children.into_inner() {
105            drop_edge(c);
106        }
107    }
108}
109
110impl<'id, ET: Tag, const TAG_BITS: u32, const ARITY: usize>
111    InnerNode<manager::Edge<'id, Self, ET, TAG_BITS>> for NodeWithLevel<'id, ET, TAG_BITS, ARITY>
112{
113    const ARITY: usize = 2;
114
115    type ChildrenIter<'a>
116        = BorrowedEdgeIter<
117        'a,
118        manager::Edge<'id, Self, ET, TAG_BITS>,
119        std::slice::Iter<'a, manager::Edge<'id, Self, ET, TAG_BITS>>,
120    >
121    where
122        Self: 'a;
123
124    #[inline(always)]
125    fn new(
126        level: LevelNo,
127        children: impl IntoIterator<Item = manager::Edge<'id, Self, ET, TAG_BITS>>,
128    ) -> Self {
129        let mut it = children.into_iter();
130        let mut children = [Self::UNINIT_EDGE; ARITY];
131
132        for slot in &mut children {
133            slot.write(it.next().unwrap());
134        }
135        debug_assert!(it.next().is_none());
136
137        // SAFETY:
138        // - all elements are initialized
139        // - we effectively move out of `children`; the old `children` are not dropped
140        //   since they are `MaybeUninit`
141        //
142        // TODO: replace this by `MaybeUninit::transpose()` /
143        // `MaybeUninit::array_assume_init()` once stable
144        let children = unsafe {
145            std::ptr::read(std::ptr::addr_of!(children)
146                as *const [manager::Edge<'id, Self, ET, TAG_BITS>; ARITY])
147        };
148
149        Self {
150            rc: AtomicUsize::new(2),
151            level: AtomicLevelNo::new(level),
152            children: UnsafeCell::new(children),
153        }
154    }
155
156    #[inline(always)]
157    fn check_level(&self, check: impl FnOnce(LevelNo) -> bool) -> bool {
158        check(self.level.load(Relaxed))
159    }
160    #[inline(always)]
161    #[track_caller]
162    fn assert_level_matches(&self, level: LevelNo) {
163        assert_eq!(
164            self.level.load(Relaxed),
165            level,
166            "the level number does not match"
167        );
168    }
169
170    #[inline(always)]
171    fn children(&self) -> Self::ChildrenIter<'_> {
172        // SAFETY: we have shared access to the node
173        BorrowedEdgeIter::from(unsafe { &*self.children.get() }.iter())
174    }
175
176    #[inline(always)]
177    fn child(&self, n: usize) -> Borrowed<'_, manager::Edge<'id, Self, ET, TAG_BITS>> {
178        // SAFETY: we have shared access to the node
179        let children = unsafe { &*self.children.get() };
180        children[n].borrowed()
181    }
182
183    #[inline(always)]
184    unsafe fn set_child(
185        &self,
186        n: usize,
187        child: manager::Edge<'id, Self, ET, TAG_BITS>,
188    ) -> manager::Edge<'id, Self, ET, TAG_BITS> {
189        // SAFETY: we have exclusive access to the node and no child is
190        // referenced
191        let children = unsafe { &mut *self.children.get() };
192        std::mem::replace(&mut children[n], child)
193    }
194
195    #[inline(always)]
196    fn ref_count(&self) -> usize {
197        // Subtract 1 for the reference in the unique table
198        self.rc.load(Relaxed) - 1
199    }
200}
201
202unsafe impl<ET, const TAG_BITS: u32, const ARITY: usize> HasLevel
203    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
204{
205    #[inline(always)]
206    fn level(&self) -> LevelNo {
207        self.level.load(Relaxed)
208    }
209
210    #[inline(always)]
211    unsafe fn set_level(&self, level: LevelNo) {
212        self.level.store(level, Relaxed);
213    }
214}
215
216unsafe impl<ET: Send + Sync, const TAG_BITS: u32, const ARITY: usize> Send
217    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
218{
219}
220unsafe impl<ET: Send + Sync, const TAG_BITS: u32, const ARITY: usize> Sync
221    for NodeWithLevel<'_, ET, TAG_BITS, ARITY>
222{
223}
224
225pub struct NodeWithLevelCons<const ARITY: usize>;
226impl<ET: Tag, const TAG_BITS: u32, const ARITY: usize> InnerNodeCons<ET, TAG_BITS>
227    for NodeWithLevelCons<ARITY>
228{
229    type T<'id> = NodeWithLevel<'id, ET, TAG_BITS, ARITY>;
230}