// mutcursor/lib.rs

1// Overrides for `docs.rs` links in the README. This first definition takes precedence.
2
3// fix broken links if documented without `alloc`:
4#![cfg_attr(not(feature = "alloc"), doc = "[`MutCursorVec`]: features#alloc")]
5#![cfg_attr(not(feature = "alloc"), doc = "[`MutCursorRootedVec`]: features#alloc")]
6
7// more precise links to `std`/`alloc` when possible:
8#![cfg_attr(feature = "std", doc = "[Vec]: std::vec::Vec")]
9#![cfg_attr(feature = "alloc", doc = "[Vec]: alloc::vec::Vec")]
10
11// Normal link to crate items:
12//! [`MutCursor`]: MutCursor
13//! [`top`]: MutCursor::top
14//! [`MutCursor::try_map_into_mut`]: MutCursor::try_map_into_mut
15//! [`MutCursorVec`]: MutCursorVec
16//! [`MutCursorRootedVec`]: MutCursorRootedVec
17//! [`unique`]: unique
18
19#![doc = include_str!("../README.md")]
20
21// Uses unstable features for `docs.rs`.
22// This is okay, `docs.rs` uses a nightly compiler, and doesn't need to re-build documentation later.
23// To avoid broken `docs.rs` pages with new uploads, before publishing a new version to `crates.io`,
24// always check if `cargo docs-rs` (https://crates.io/crates/cargo-docs-rs) still works,
25// (with `rustup override set nightly` set locally to a current nightly).
26#![cfg_attr(docsrs, feature(doc_cfg))]
27#![cfg_attr(docsrs, feature(clone_to_uninit))]
28
29#![no_std]
30
31#[cfg(any(test, feature = "alloc"))]
32extern crate alloc;
33#[cfg(any(test, feature = "std"))]
34#[cfg_attr(test, macro_use)]
35extern crate std;
36
#[cfg(doc)]
#[cfg_attr(docsrs, doc(cfg(doc)))]
pub mod features {
    //! Description of the crate features (not a real module).
    //!
    //! ## `default`
    //! The default features of this crate include `std` and `alloc`.
    //! ## `alloc`
    //! Enables usage of the `alloc` crate, and a public dependency on
    //! [`stable_deref_trait`] at version `1.*`
    //!
    #![cfg_attr(not(feature = "alloc"), doc = "Enables the `MutCursorVec` and `MutCursorRootedVec` APIs,")]
    #![cfg_attr(not(feature = "alloc"), doc = "as well as the `unique` module.")]
    #![cfg_attr(feature = "alloc", doc = "Enables the [`MutCursorVec`] and [`MutCursorRootedVec`] APIs,")]
    #![cfg_attr(feature = "alloc", doc = "as well as the [`unique`](crate::unique) module.")]
    //! ## `std`
    //! Enables `std` support for [`StableDeref`], so you can use std-only stable pointers
    //! without needing to depend on [`stable_deref_trait`] yourself.
    //!
    //! Also extends our re-implementation of [`CloneToUninit`] (used for [`make_unique`] in the
    //! `unique` module) to support `std`-only types (`OsStr`, `Path`).
    //!
    // Feature-gated link targets take precedence over the plain-URL fallbacks below,
    // so rustdoc links to the real items when the features are enabled.
    #![cfg_attr(feature = "alloc", doc = "[`stable_deref_trait`]: stable_deref_trait")]
    #![cfg_attr(feature = "alloc", doc = "[`StableDeref`]: stable_deref_trait::StableDeref")]
    #![cfg_attr(feature = "alloc", doc = "[`make_unique`]: crate::unique::UniqueExt::make_unique")]
    #![cfg_attr(docsrs, doc = "[`CloneToUninit`]: std::clone::CloneToUninit")]
    //! [`stable_deref_trait`]: https://docs.rs/stable_deref_trait/1/stable_deref_trait/
    //! [`StableDeref`]: https://docs.rs/stable_deref_trait/1/stable_deref_trait/trait.StableDeref.html
    //! [`make_unique`]: #alloc
    //! [`CloneToUninit`]: https://doc.rust-lang.org/nightly/std/clone/trait.CloneToUninit.html
    // Bring crate items into scope so the intra-doc links above can resolve.
    use super::*;
}
69
70use core::ptr::NonNull;
71use core::mem::MaybeUninit;
72use core::marker::PhantomData;
73
// `MutCursorVec` and friends: cursor types backed by a `Vec` (requires `alloc`).
#[cfg(feature = "alloc")]
mod mut_cursor_vec;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use mut_cursor_vec::*;


// `unique` module (requires `alloc`); referenced from the `features` docs above.
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub mod unique;

// `MutCursorRootedVec` and related items (requires `alloc`).
#[cfg(feature = "alloc")]
mod rooted_vec;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use rooted_vec::*;
90
/// Stores a stack of `&mut` references, only allowing access to the top element on the stack
///
/// The `MutCursor` stores `N` `&mut T` references, but only allows access to the [`top`](Self::top)
pub struct MutCursor<'root, T: ?Sized + 'root, const N: usize> {
    cnt: usize, //The last item cannot be removed, so cnt==0 means there is 1 item
    top: usize, //Index of the current top slot; `push`/`backtrack` wrap it modulo N (ring buffer)
    stack: [MaybeUninit<NonNull<T>>; N], //Slot 0 is initialized in `new`; others only as `push` fills them
    phantom: PhantomData<&'root mut T>, //Variance
}
100
// SAFETY: `MutCursor` is semantically a stack of `&mut T` (the raw `NonNull<T>`s are
// all created from `&mut T`, and `phantom` records that ownership). These impls mirror
// the standard bounds of `&mut T` itself: `Sync` when `T: Sync`, `Send` when `T: Send`.
unsafe impl<'a, T, const N: usize> Sync for MutCursor<'a, T, N> where T: Sync, T: ?Sized {}
unsafe impl<'a, T, const N: usize> Send for MutCursor<'a, T, N> where T: Send, T: ?Sized {}
103
104impl<'root, T: ?Sized + 'root, const N: usize> MutCursor<'root, T, N> {
105    /// Returns a new `MutCursor` with a reference to the specified root
106    #[inline]
107    pub fn new(root: &'root mut T) -> Self {
108        debug_assert!(N > 0);
109        let mut stack = Self {
110            cnt: 0,
111            top: 0,
112            stack: [MaybeUninit::uninit(); N],
113            phantom: PhantomData::default(),
114        };
115        unsafe{ *stack.stack.get_unchecked_mut(0) = MaybeUninit::new(NonNull::from(root)); }
116        stack
117    }
118    /// Returns a const reference from the mutable reference on the top of the stack
119    #[inline]
120    pub fn top(&self) -> &T {
121        unsafe{ self.stack.get_unchecked(self.top).assume_init().as_ref() }
122    }
123    /// Returns the mutable reference on the top of the stack 
124    #[inline]
125    pub fn top_mut(&mut self) -> &mut T {
126        unsafe{ self.top_mut_internal() }
127    }
128    /// Returns the mutable reference on the top of the stack, consuming the stack
129    #[inline]
130    pub fn into_mut(mut self) -> &'root mut T {
131        unsafe{ self.top_mut_internal() }
132    }
133    /// Consumes the stack and returns a mutable reference to an object with the `'root` lifetime,
134    /// if a closure returns `Ok`, otherwise returns the stack and a custom error value
135    ///
136    /// This method is useful when you need to call a fallible API with the node, but need the result
137    /// of the API to be in the `'root` lifetime so it can outlive the `MutCursor`.
138    /// ```
139    /// # struct TreeNode {
140    /// #   val: usize,
141    /// #   next: Option<Box<TreeNode>>
142    /// # }
143    /// # impl TreeNode {
144    /// #   fn new(count: usize) -> Self {
145    /// #     if count > 0 {
146    /// #       Self {val: count, next: Some(Box::new(Self::new(count-1)))}
147    /// #     } else {
148    /// #       Self {val: 0, next: None}
149    /// #     }
150    /// #   }
151    /// #   fn traverse(&mut self) -> Option<&mut Self> {
152    /// #     self.next.as_mut().map(|boxed| &mut **boxed)
153    /// #   }
154    /// #   fn is_leaf(&self) -> bool {
155    /// #     self.val == 0
156    /// #   }
157    /// # }
158    /// use mutcursor::MutCursor;
159    /// let mut tree = TreeNode::new(3);
160    ///
161    /// let node_stack = MutCursor::<TreeNode, 2>::new(&mut tree);
162    /// let node_ref = match node_stack.try_map_into_mut(|top_ref| {
163    ///     if top_ref.is_leaf() {
164    ///         Ok(top_ref)
165    ///     } else {
166    ///         Err(top_ref.val)
167    ///     }
168    /// }) {
169    ///     Ok(node) => node,
170    ///     Err((mut node_stack, _val)) => {
171    ///         if node_stack.depth() > 0 {
172    ///             node_stack.backtrack();
173    ///         }
174    ///         node_stack.into_mut()
175    ///     }
176    /// };
177    /// ```
178    #[inline]
179    pub fn try_map_into_mut<U, E, F>(mut self, f: F) -> Result<&'root mut U, (Self, E)>
180        where for<'r> F: FnOnce(&'r mut T) -> Result<&'r mut U, E>
181    {
182        let top_ref = unsafe{ self.top_mut_internal() };
183        match f(top_ref) {
184            Ok(r) => Ok(r),
185            Err(e) => Err((self, e))
186        }
187    }
188    /// Returns the number of excess references stored in the stack, which corresponds to the number of
189    /// times [backtrack](Self::backtrack) may be called
190    #[inline]
191    pub fn depth(&self) -> usize {
192        self.cnt
193    }
194    /// Returns the number of references the stack is capable of holding
195    #[inline]
196    pub const fn capacity(&self) -> usize {
197        N
198    }
199    /// Steps deeper into the traversal, pushing a new reference onto the top of the stack
200    ///
201    /// If the `step_f` closure returns `Some()`, the contained reference is pushed onto the stack and
202    /// this method returns `true`.  If the closure returns `None` then the stack is unmodified and this
203    /// method returns `false`.
204    ///
205    /// If the number of references in the stack exceeds the capacity, the reference at the bottom of the
206    /// stack will be lost.
207    #[inline]
208    pub fn advance<F>(&mut self, step_f: F) -> bool
209        where F: FnOnce(&mut T) -> Option<&mut T>
210    {
211        match step_f(unsafe{ self.top_mut_internal() }) {
212            Some(new_node) => {
213                unsafe{ self.push(NonNull::from(new_node)); }
214                true
215            },
216            None => false
217        }
218    }
219    /// Pops a reference from the stack, exposing the prior reference as the new [`top`](Self::top)
220    ///
221    /// This method will panic if the stack contains only 1 entry
222    #[inline]
223    pub fn backtrack(&mut self) {
224        if self.cnt < 1 {
225            panic!("MutCursor must contain valid reference")
226        }
227        if self.top < 1 {
228            self.top = N-1;
229        } else {
230            self.top -= 1;
231        }
232        self.cnt -= 1;
233    }
234    /// Private
235    #[inline]
236    unsafe fn top_mut_internal(&mut self) -> &'root mut T {
237        unsafe{ self.stack[self.top].assume_init().as_mut() }
238    }
239    /// Private
240    #[inline]
241    unsafe fn push(&mut self, t: NonNull<T>) {
242        if self.top + 1 < N {
243            self.top = self.top + 1;
244        } else {
245            self.top = 0;
246        }
247        *self.stack.get_unchecked_mut(self.top) = MaybeUninit::new(t);
248        if self.cnt < N-1 {
249            self.cnt += 1;
250        }
251    }
252}
253
254impl<'root, T: ?Sized, const N: usize> core::ops::Deref for MutCursor<'root, T, N> {
255    type Target = T;
256    fn deref(&self) -> &T {
257        self.top()
258    }
259}
260
261impl<'root, T: ?Sized, const N: usize> core::ops::DerefMut for MutCursor<'root, T, N> {
262    fn deref_mut(&mut self) -> &mut T {
263        self.top_mut()
264    }
265}
266
#[cfg(test)]
mod test {
    use std::*;
    use std::{boxed::*, vec::Vec, string::String};

    use crate::*;

    // Simple singly-linked chain used as a traversal target: `new(n)` builds
    // nodes with `val` counting down n, n-1, ..., 0, and `traverse` steps to
    // the next node if one exists.
    struct TreeNode {
        val: usize,
        next: Option<Box<TreeNode>>
    }
    impl TreeNode {
        fn new(count: usize) -> Self {
            if count > 0 {
                Self {val: count, next: Some(Box::new(Self::new(count-1)))}
            } else {
                Self {val: 0, next: None}
            }
        }
        fn traverse(&mut self) -> Option<&mut Self> {
            self.next.as_mut().map(|boxed| &mut **boxed)
        }
    }

    // Exercises advance/backtrack/depth/into_mut on a chain (depth 10) deeper
    // than the cursor capacity (7), so the ring buffer wraps and `depth()`
    // saturates at N-1 == 6, losing the bottom-most references.
    #[test]
    fn basics() {
        let mut tree = TreeNode::new(10);
        let mut node_stack = MutCursor::<TreeNode, 7>::new(&mut tree);

        // Walk all the way to the leaf (val == 0)
        while node_stack.advance(|node| {
            node.traverse()
        }) {}

        assert_eq!(node_stack.top().val, 0);
        assert_eq!(node_stack.depth(), 6);

        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 1);
        assert_eq!(node_stack.depth(), 5);

        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 4);
        assert_eq!(node_stack.depth(), 2);

        // Re-descend to the leaf; depth saturates at 6 again
        while node_stack.advance(|node| {
            node.traverse()
        }) {}
        assert_eq!(node_stack.top().val, 0);
        assert_eq!(node_stack.depth(), 6);

        // Backtrack to the bottom-most retained reference (val == 6, since the
        // entries for 10..7 were overwritten when the ring wrapped)
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 6);
        assert_eq!(node_stack.depth(), 0);

        assert_eq!(node_stack.into_mut().val, 6);
    }

    // Compile-time check that the closure passed to `try_map_into_mut` cannot
    // leak its argument; the rejected escape attempts are kept as comments.
    #[test]
    fn try_to_escape_map_closure() {

        let mut tree = TreeNode::new(3);

        // 1-element node_stack is just a more restrictive `&mut`
        let node_stack = MutCursor::<TreeNode, 1>::new(&mut tree);

        let mut _poison: &mut TreeNode;
        match node_stack.try_map_into_mut(|node| -> Result<&mut TreeNode, &mut TreeNode> {
            //_poison = node; //Good.  Can't escape that way

            //Err(node) //Good.  Can't escape that way either

            Ok(node)
        }) {
            Ok(_r) => {},
            Err(_e) => {}
        }
    }

    /// Demonstrates an API soundness issue that was problematic in a prior version of the crate
    /// See https://github.com/luketpeterson/mutcursor/issues/1
    #[test]
    fn try_to_alias_mut_with_advance() {
        let mut x = 1;
        let mut c = MutCursor::<_, 1>::new(&mut x);
        #[allow(unused_mut)]
        let mut _r1: Option<&mut i32> = None;
        c.advance(|_r| {
            // _r1 = Some(_r); //Good.  Can't escape the closure anymore
            None
        });
        #[allow(unused_mut)]
        let mut _r2: Option<&mut i32> = None;
        c.advance(|_r| {
            // _r2 = Some(_r); //Good.  Can't escape the closure anymore
            None
        });
        // Have we aliased the mutable references?!?
    }

    /// Demonstrates an API soundness issue that was problematic in a prior version of the crate
    /// See https://github.com/luketpeterson/mutcursor/issues/1
    #[test]
    fn test_variance() {
        let mut dummy = String::new();
        let mut dummy_ref = &mut dummy;
        #[allow(unused_mut)]
        let mut _c = MutCursor::<_, 1>::new(&mut dummy_ref);
        {
            #[allow(unused_mut)]
            let mut _hello = String::from("hello!");
            // covariance allows turning `MutCursor<'a, &'long mut String, 1>` into
            // `MutCursor<'a, &'short mut String, 1>`, where the `'short` lifetime now
            // can be shorter than the original lifetime of `dummy_ref`
            // 
            // (I believe the coercion actually already happens when `c` gets assigned above,
            // but even without that, you could imagine inserting a `let c = c as _;` step
            // that makes this more explicit)

            // *_c.top_mut() = &mut _hello; // this sets `dummy_ref` to point to `hello` //Good.  Can't assign to reference shorter lifetime anymore!
            println!("{}", dummy_ref); // hello!
        }
        println!("{}", dummy_ref); // <garbage> (use-after-free)
    }

    use std::{thread, thread::ScopedJoinHandle};
    // Runs the `basics` traversal concurrently on 128 independent trees, one
    // cursor per scoped thread, validating the Send impl under real use.
    #[test]
    fn multi_thread_test() {

        let thread_cnt = 128;
        let mut data: Vec<TreeNode> = vec![];
        for _ in 0..thread_cnt {
            data.push(TreeNode::new(10));
        }
        let mut data_refs: Vec<&mut TreeNode> = data.iter_mut().collect();

        thread::scope(|scope| {

            let mut threads: Vec<ScopedJoinHandle<()>> = Vec::with_capacity(thread_cnt);

            //Spawn all the threads
            for _ in 0..thread_cnt {
                let tree = data_refs.pop().unwrap();
                let mut node_stack = MutCursor::<TreeNode, 7>::new(tree);

                let thread = scope.spawn(move || {

                    while node_stack.advance(|node| {
                        node.traverse()
                    }) {}

                    assert_eq!(node_stack.top().val, 0);
                    assert_eq!(node_stack.depth(), 6);

                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 1);
                    assert_eq!(node_stack.depth(), 5);

                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 4);
                    assert_eq!(node_stack.depth(), 2);

                    while node_stack.advance(|node| {
                        node.traverse()
                    }) {}
                    assert_eq!(node_stack.top().val, 0);
                    assert_eq!(node_stack.depth(), 6);

                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 6);
                    assert_eq!(node_stack.depth(), 0);

                    assert_eq!(node_stack.into_mut().val, 6);
                });
                threads.push(thread);
            };

            //Wait for them to finish
            for thread in threads {
                thread.join().unwrap();
            }
        });
    }
}
463}