order_maintenance/lib.rs
//! Totally-ordered priorities.
//!
//! See documentation for [`Priority`].

use slotmap::{self, HopSlotMap};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::rc::Rc;

slotmap::new_key_type! {
    /// Reference to a [`Priority`], used as a key to [`Arena::priorities`].
    struct PriorityRef;
}

impl PriorityRef {
    /// "Dereferences" this index in an arena.
    ///
    /// Basically flips the arguments of [`Arena::get()`], but since this is in postfix, it's
    /// useful for chaining a series of operations.
    fn as_ref(self, arena: &Arena) -> &PriorityInner {
        arena.get(self)
    }
}

/// Shared state between all priorities that can be compared.
#[derive(Debug, Default)]
struct Arena {
    /// Total number of priorities allocated in this arena.
    total: usize,

    /// Internal store of priorities, indexed by [`PriorityRef`].
    priorities: HopSlotMap<PriorityRef, PriorityInner>,

    /// Key to the base priority, which should never be deleted (unless the arena is dropped).
    base: PriorityRef,
}

impl Arena {
    /// Label for the initial priority allocated in this arena.
    const BASE: usize = 0;

    // The total number of labels that can be allocated in this arena.
    // const SIZE: usize = 1 << (usize::BITS - 1);

    /// Create a fresh arena containing the base priority plus the first user-facing priority,
    /// returning the arena together with the key of that first priority.
    fn new_with_priority() -> (Self, PriorityRef) {
        let mut priorities = HopSlotMap::with_key();

        let base = priorities.insert_with_key(|k| PriorityInner {
            next: RefCell::new(k),
            prev: RefCell::new(k),
            label: RefCell::new(Arena::BASE),
            ref_count: RefCell::new(1),
        });

        let first = priorities.insert(PriorityInner {
            next: RefCell::new(base),
            prev: RefCell::new(base),
            label: RefCell::new(usize::MAX / 2),
            ref_count: RefCell::new(1),
        });

        unsafe {
            let base_prio = priorities.get_unchecked(base);
            base_prio.set_next(first);
            base_prio.set_prev(first);
        }

        (
            Self {
                total: 1,
                priorities,
                base,
            },
            first,
        )
    }

    /// Insert a new priority into the priorities store, constructing that priority using the
    /// given closure, which takes the new key as its argument.
    fn insert(&mut self, f: impl FnOnce(PriorityRef) -> PriorityInner) -> PriorityRef {
        self.total += 1;
        self.priorities.insert_with_key(f)
    }

    /// Remove a priority from the priorities store.
    fn remove(&mut self, key: PriorityRef) {
        self.priorities.remove(key);
        self.total -= 1;
    }

    /// Retrieve a reference to a priority from the priorities store using a key.
    fn get(&self, key: PriorityRef) -> &PriorityInner {
        self.priorities.get(key).unwrap()
    }
}

/// Contains the actual data of a priority.
///
/// To circumvent Rust mutability rules, all fields stored in here are guarded by [`RefCell`]s.
/// Helpers are used to eliminate boilerplate, and to create a level of abstraction beneath which
/// optimizations can take place.
#[derive(Debug)]
struct PriorityInner {
    /// Pointer to the next priority in the linked list.
    next: RefCell<PriorityRef>,
    /// Pointer to the previous priority in the linked list.
    prev: RefCell<PriorityRef>,
    /// Label that is used to numerically compare priorities.
    label: RefCell<usize>,
    /// Reference count; when this reaches zero, it will be deallocated from the [`Arena`].
    ref_count: RefCell<usize>,
}

impl PriorityInner {
    fn next(&self) -> PriorityRef {
        *self.next.borrow()
    }

    fn set_next(&self, next: PriorityRef) {
        *self.next.borrow_mut() = next;
    }

    fn prev(&self) -> PriorityRef {
        *self.prev.borrow()
    }

    fn set_prev(&self, prev: PriorityRef) {
        *self.prev.borrow_mut() = prev;
    }

    fn label(&self) -> usize {
        *self.label.borrow()
    }

    fn set_label(&self, label: usize) {
        *self.label.borrow_mut() = label;
    }

    /// Compute the "weight" of some `other` priority, relative to this.
139 ///
140 /// The math used for this computation is not entirely intuitive, but is discussed in detail in
141 /// Dietz & Sleator and Bender et al.'s papers on the order maintenance problem.
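    ///
    /// For example, if `self.label()` is `usize::MAX - 1` and `other.label()` is `3`, then the
    /// weight of `other` is `5`: the wrapping subtraction measures how far `other`'s label sits
    /// ahead of this one's around the circular label space.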
    fn weight(&self, other: &Self) -> usize {
        other.label().wrapping_sub(self.label())
    }

    /// Increment the reference count.
    fn ref_inc(&self) {
        *self.ref_count.borrow_mut() += 1;
    }

    /// Decrement the reference count; returns true when it reaches zero (time to deallocate).
    fn ref_dec(&self) -> bool {
        *self.ref_count.borrow_mut() -= 1;
        *self.ref_count.borrow() == 0
    }
}

/// A totally-ordered priority.
///
/// These priorities implement Dietz & Sleator (1987)'s solution to the order maintenance problem,
/// which requires a data structure `T` that supports insertion and comparison operations such that
/// insertion constructs an element of the next greatest priority:
///
/// ```text
/// forall t: T, t < t.insert()
/// ```
///
/// but is still lower priority than all other greater priorities:
///
/// ```text
/// forall t t': T s.t. t < t', t.insert() < t'
/// ```
///
/// Amongst a collection of `n` priorities, comparison takes constant time, while insertion takes
/// amortized `log(n)` time.
///
/// ## Usage
///
/// ```rust
/// # use order_maintenance::*;
/// let p0 = Priority::new();
/// let p2 = p0.insert();
/// let p1 = p0.insert();
/// let p3 = p2.insert();
///
/// assert!(p0 < p1);
/// assert!(p0 < p2);
/// assert!(p0 < p3);
/// assert!(p1 < p2);
/// assert!(p1 < p3);
/// assert!(p2 < p3);
/// ```
///
/// ## Memory management
///
/// Under the hood, these priorities are actually references to nodes of a circular linked list,
/// allocated from an arena. Those nodes are reference-counted, which allows these priorities to be
/// cloned. The node's reference count is decremented when this priority is dropped, and if it
/// reaches zero, the node is deallocated.
///
/// Priorities from different arenas cannot be compared with one another.
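///
/// For example, since the `PartialOrd` implementation below only yields an ordering for
/// priorities that share an arena, comparing priorities created by two separate calls to
/// [`Priority::new()`] produces no ordering at all:
///
/// ```rust
/// # use order_maintenance::*;
/// let p = Priority::new();
/// let q = Priority::new(); // allocated in a different arena
/// assert_eq!(p.partial_cmp(&q), None);
/// assert!(!(p < q) && !(q < p));
/// ```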
///
/// ## Algorithm
///
/// This implementation uses Dietz & Sleator (1987)'s algorithm, also called tag-range relabeling
/// (as opposed to Bender et al.'s list-range relabeling algorithm).
///
/// While Dietz & Sleator also propose a data structure that supports constant-time insertion, that
/// data structure is so complex that the overhead of maintaining it would overwhelm any
/// theoretical efficiency gains for any reasonable number of priorities.
///
/// More recently, Bender et al. proposed an alternative solution, using a list-range relabeling
/// approach. That approach is likely more efficient on real hardware, since it favors bit-wise
/// operations over multiplication and division. For now, this crate uses the possibly slower
/// tag-range relabeling approach, because it was ported from a scripting language that is better
/// suited toward floating-point operations. It remains to be seen which implementation is better
/// under which circumstances.
///
/// ## References
///
/// - Paul F. Dietz and Daniel D. Sleator. _Two Algorithms for Maintaining Order in a List._ 1987.
///
/// - Michael A. Bender, Richard Cole, Erik D. Demaine, Martin Farach-Colton, and Jack Zito.
///   _Two Simplified Algorithms for Maintaining Order in a List._ 2002.
#[derive(Debug)]
pub struct Priority {
    arena: Rc<RefCell<Arena>>,
    this: PriorityRef,
}

impl Priority {
    /// Allocate a new priority in a fresh arena.
    ///
    /// Note that priorities allocated in separate arenas cannot be compared; to construct
    /// a [`Priority`] that can be compared to some existing priority, use [`Priority::insert()`].
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        let (arena, key) = Arena::new_with_priority();
        Self {
            arena: Rc::new(RefCell::new(arena)),
            this: key,
        }
    }

    /// Allocate the next greatest priority after the given `self`.
    pub fn insert(&self) -> Self {
        let key = self.arena_mut(|arena| {
            let this = self.this.as_ref(arena);

            // Before we insert anything, we first need to relabel successive priorities in
            // order to ensure labels are evenly distributed.

            // Search for how many nodes we need to relabel, and the weight of the range they span
            let (count, weight) = {
                let mut count = 1;
                let mut prio = this.next().as_ref(arena);
                while this.weight(prio) != 0 && this.weight(prio) <= count * count {
                    prio = prio.next().as_ref(arena);
                    count += 1;
                }
                (count, this.weight(prio))
            };

            // Now, adjust labels of those nodes
            let mut prio = this.next().as_ref(arena);
            for k in 1..count {
                // if weight == 0, then it actually encodes usize::MAX + 1, which cannot be
                // represented; fall back to spreading the relabeled nodes evenly over a
                // smaller range instead.
                let weight_k = if weight == 0 {
                    k.wrapping_mul(1 << (usize::BITS / 2)).wrapping_mul(2)
                } else {
                    k.wrapping_mul(weight)
                };
                prio.set_label((weight_k / count).wrapping_add(this.label()));

                prio = prio.next().as_ref(arena);
            }

            // Compute new priority fields; the new label is half-way between this and next
            let new_label = this
                .label()
                .wrapping_add(this.next().as_ref(arena).label().wrapping_sub(this.label()) / 2);
            let new_prev = self.this;
            let new_next = this.next();

            // Allocate new node
            let new_key = arena.insert(|_| PriorityInner {
                next: RefCell::new(new_next),
                prev: RefCell::new(new_prev),
                label: RefCell::new(new_label),
                ref_count: RefCell::new(1),
            });

            // Fix up pointers to point to newly allocated node
            let this = self.this.as_ref(arena); // appease borrow checker (:
            this.next().as_ref(arena).set_prev(new_key); // self.next.prev = new
            this.set_next(new_key); // self.next = new

            new_key
        });

        Self {
            arena: self.arena.clone(),
            this: key,
        }
    }

    /// Execute callback with shared reference to arena.
    ///
    /// Ugly, but useful for not exposing [`RefCell`] or [`std::cell::Ref`].
    fn arena<T>(&self, f: impl FnOnce(&Arena) -> T) -> T {
        f(&self.arena.borrow())
    }

    /// Execute callback with mutable reference to arena.
    ///
    /// Ugly, but useful for not exposing [`RefCell`] or [`std::cell::RefMut`].
    fn arena_mut<T>(&self, f: impl FnOnce(&mut Arena) -> T) -> T {
        f(&mut self.arena.borrow_mut())
    }

    /// Compute this priority's label relative to the arena's base label; used for comparison.
    fn relative(&self) -> usize {
        self.arena(|a| {
            self.this
                .as_ref(a)
                .label()
                .wrapping_sub(a.base.as_ref(a).label())
        })
    }
}

impl Clone for Priority {
    fn clone(&self) -> Self {
        // Increment ref count of the `PriorityInner`.
        self.arena(|a| self.this.as_ref(a).ref_inc());

        Self {
            arena: self.arena.clone(),
            this: self.this,
        }
    }
}

impl PartialEq for Priority {
    fn eq(&self, other: &Self) -> bool {
        Rc::ptr_eq(&self.arena, &other.arena) && self.this == other.this
    }
}

impl Eq for Priority {}

impl PartialOrd for Priority {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        if !Rc::ptr_eq(&self.arena, &other.arena) {
            return None;
        }

        if self.this == other.this {
            return Some(Ordering::Equal);
        }

        self.relative().partial_cmp(&other.relative())
    }
}

impl Drop for Priority {
    fn drop(&mut self) {
        self.arena_mut(|a| {
            if self.this.as_ref(a).ref_dec() {
                // Ref count reached zero; remove this node from the linked list, then deallocate
                // it from the arena.

                let next = self.this.as_ref(a).next();
                let prev = self.this.as_ref(a).prev();

                // self.next.prev = self.prev
                next.as_ref(a).set_prev(prev);

                // self.prev.next = self.next
                prev.as_ref(a).set_next(next);

                a.remove(self.this)
            }
        });
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn drop_single() {
        let _p = Priority::new();
    }

    #[test]
    fn compare_two() {
        let p1 = Priority::new();
        let p2 = p1.insert();
        assert!(p1 < p2);
    }

    #[test]
    fn insertion() {
        let p1 = Priority::new();
        let p3 = p1.insert();
        let p2 = p1.insert();

        assert!(p1 < p2);
        assert!(p2 < p3);
        assert!(p1 < p3);
    }

    #[test]
    fn transitive() {
        let p1 = Priority::new();
        let p2 = p1.insert();
        let p3 = p2.insert();

        assert!(p1 < p2);
        assert!(p2 < p3);
        assert!(p1 < p3);
    }

    #[test]
    fn no_leak() {
        let a = {
            let p1 = Priority::new();
            let _p2 = p1.insert();
            let _p3 = p1.insert();
            p1.arena.clone()
        };
        assert!(a.borrow().priorities.len() == 1);
    }

    #[test]
    fn can_clone() {
        let a = {
            let p1 = Priority::new();
            let p2 = p1.insert();
            let p3 = p2.insert();

            {
                let p1 = p1.clone();

                assert!(p1 < p2);
                assert!(p2 < p3);
                assert!(p1 < p3);
            }

            assert!(p1 < p2);
            assert!(p2 < p3);
            assert!(p1 < p3);
            p1.arena.clone()
        };
        assert!(a.borrow().priorities.len() == 1);
    }

    #[test]
    fn horde() {
        let mut v = vec![Priority::new()];

        for _ in 0..1024 {
            v.push(v[v.len() - 1].insert());
        }

        for i in 0..v.len() - 1 {
            assert!(v[i] < v[i + 1]);
        }
    }
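
    #[test]
    fn clone_eq_and_separate_arenas() {
        // A clone refers to the same node in the same arena, so it compares equal to the
        // original; priorities from separate arenas are never equal and have no defined
        // ordering (`partial_cmp` returns `None`). Both follow from the trait impls above.
        let p = Priority::new();
        let p_clone = p.clone();
        assert!(p == p_clone);

        let q = Priority::new();
        assert!(p != q);
        assert_eq!(p.partial_cmp(&q), None);
    }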
}