#![cfg_attr(not(feature = "alloc"), doc = "[`MutCursorVec`]: features#alloc")]
#![cfg_attr(not(feature = "alloc"), doc = "[`MutCursorRootedVec`]: features#alloc")]

#![cfg_attr(feature = "std", doc = "[Vec]: std::vec::Vec")]
#![cfg_attr(feature = "alloc", doc = "[Vec]: alloc::vec::Vec")]

#![doc = include_str!("../README.md")]

#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(clone_to_uninit))]

#![no_std]

#[cfg(any(test, feature = "alloc"))]
extern crate alloc;
#[cfg(any(test, feature = "std"))]
#[cfg_attr(test, macro_use)]
extern crate std;

#[cfg(doc)]
#[cfg_attr(docsrs, doc(cfg(doc)))]
pub mod features {
    #![cfg_attr(not(feature = "alloc"), doc = "Enables the `MutCursorVec` and `MutCursorRootedVec` APIs,")]
    #![cfg_attr(not(feature = "alloc"), doc = "as well as the `unique` module.")]
    #![cfg_attr(feature = "alloc", doc = "Enables the [`MutCursorVec`] and [`MutCursorRootedVec`] APIs,")]
    #![cfg_attr(feature = "alloc", doc = "as well as the [`unique`](crate::unique) module.")]
    #![cfg_attr(feature = "alloc", doc = "[`stable_deref_trait`]: stable_deref_trait")]
    #![cfg_attr(feature = "alloc", doc = "[`StableDeref`]: stable_deref_trait::StableDeref")]
    #![cfg_attr(feature = "alloc", doc = "[`make_unique`]: crate::unique::UniqueExt::make_unique")]
    #![cfg_attr(docsrs, doc = "[`CloneToUninit`]: std::clone::CloneToUninit")]
    use super::*;
}

use core::ptr::NonNull;
use core::mem::MaybeUninit;
use core::marker::PhantomData;

#[cfg(feature = "alloc")]
mod mut_cursor_vec;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use mut_cursor_vec::*;


#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub mod unique;

#[cfg(feature = "alloc")]
mod rooted_vec;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use rooted_vec::*;

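/// A stack of mutable references into a single structure, all derived from one
/// `&'root mut T`.
///
/// The cursor lets a traversal descend with [`advance`](MutCursor::advance) and return
/// to earlier positions with [`backtrack`](MutCursor::backtrack) without re-borrowing
/// from the root. The stack has a fixed capacity of `N` slots backed by a ring buffer,
/// so only the most recent `N` references are retained and the reported
/// [`depth`](MutCursor::depth) never exceeds `N - 1`.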
pub struct MutCursor<'root, T: ?Sized + 'root, const N: usize> {
    cnt: usize,
    top: usize,
    stack: [MaybeUninit<NonNull<T>>; N],
    phantom: PhantomData<&'root mut T>,
}

unsafe impl<'a, T, const N: usize> Sync for MutCursor<'a, T, N> where T: Sync, T: ?Sized {}
unsafe impl<'a, T, const N: usize> Send for MutCursor<'a, T, N> where T: Send, T: ?Sized {}

impl<'root, T: ?Sized + 'root, const N: usize> MutCursor<'root, T, N> {
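    /// Creates a new `MutCursor` with `root` as its only entry.
    ///
    /// `N` must be greater than 0; this is checked with a debug assertion.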
    #[inline]
    pub fn new(root: &'root mut T) -> Self {
        debug_assert!(N > 0);
        let mut stack = Self {
            cnt: 0,
            top: 0,
            stack: [MaybeUninit::uninit(); N],
            phantom: PhantomData::default(),
        };
        unsafe{ *stack.stack.get_unchecked_mut(0) = MaybeUninit::new(NonNull::from(root)); }
        stack
    }
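    /// Returns a shared reference to the entry on top of the stack.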
    #[inline]
    pub fn top(&self) -> &T {
        unsafe{ self.stack.get_unchecked(self.top).assume_init().as_ref() }
    }
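    /// Returns a mutable reference to the entry on top of the stack.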
    #[inline]
    pub fn top_mut(&mut self) -> &mut T {
        unsafe{ self.top_mut_internal() }
    }
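    /// Consumes the cursor and returns the top entry as a mutable reference with the
    /// `'root` lifetime.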
    #[inline]
    pub fn into_mut(mut self) -> &'root mut T {
        unsafe{ self.top_mut_internal() }
    }
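    /// Consumes the cursor and attempts to map the top entry through `f`.
    ///
    /// On `Ok`, the mapped reference is returned with the `'root` lifetime; on `Err`,
    /// the cursor is handed back along with the error so it can continue to be used.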
    #[inline]
    pub fn try_map_into_mut<U, E, F>(mut self, f: F) -> Result<&'root mut U, (Self, E)>
        where for<'r> F: FnOnce(&'r mut T) -> Result<&'r mut U, E>
    {
        let top_ref = unsafe{ self.top_mut_internal() };
        match f(top_ref) {
            Ok(r) => Ok(r),
            Err(e) => Err((self, e))
        }
    }
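    /// Returns the number of entries on the stack below the current top, i.e. the
    /// number of times the cursor can [`backtrack`](Self::backtrack); 0 for a freshly
    /// created cursor.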
    #[inline]
    pub fn depth(&self) -> usize {
        self.cnt
    }
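    /// Returns `N`, the fixed number of reference slots in the cursor's stack.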
    #[inline]
    pub const fn capacity(&self) -> usize {
        N
    }
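    /// Attempts to step the cursor deeper by calling `step_f` on the current top entry.
    ///
    /// If `step_f` returns `Some`, the returned reference is pushed onto the stack,
    /// becomes the new top, and `advance` returns `true`. If `step_f` returns `None`,
    /// the cursor is left unchanged and `advance` returns `false`.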
    #[inline]
    pub fn advance<F>(&mut self, step_f: F) -> bool
        where F: FnOnce(&mut T) -> Option<&mut T>
    {
        match step_f(unsafe{ self.top_mut_internal() }) {
            Some(new_node) => {
                unsafe{ self.push(NonNull::from(new_node)); }
                true
            },
            None => false
        }
    }
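    /// Pops the top entry off the stack, making the previously pushed reference the
    /// top again.
    ///
    /// Panics if only the root entry remains, i.e. if [`depth`](Self::depth) is 0.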
    #[inline]
    pub fn backtrack(&mut self) {
        if self.cnt < 1 {
            panic!("MutCursor must contain valid reference")
        }
        if self.top < 1 {
            self.top = N-1;
        } else {
            self.top -= 1;
        }
        self.cnt -= 1;
    }
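    // Internal helper: re-borrows the top entry with the full `'root` lifetime.
    // Callers must ensure the returned reference is not used alongside any other
    // reference derived from this cursor.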
    #[inline]
    unsafe fn top_mut_internal(&mut self) -> &'root mut T {
        unsafe{ self.stack[self.top].assume_init().as_mut() }
    }
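    // Internal helper: pushes a new top entry. The backing array is used as a ring
    // buffer, so once it is full the oldest entry is overwritten and `cnt` stops
    // growing at `N - 1`.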
    #[inline]
    unsafe fn push(&mut self, t: NonNull<T>) {
        if self.top + 1 < N {
            self.top += 1;
        } else {
            self.top = 0;
        }
        *self.stack.get_unchecked_mut(self.top) = MaybeUninit::new(t);
        if self.cnt < N-1 {
            self.cnt += 1;
        }
    }
}

impl<'root, T: ?Sized, const N: usize> core::ops::Deref for MutCursor<'root, T, N> {
    type Target = T;
    fn deref(&self) -> &T {
        self.top()
    }
}

impl<'root, T: ?Sized, const N: usize> core::ops::DerefMut for MutCursor<'root, T, N> {
    fn deref_mut(&mut self) -> &mut T {
        self.top_mut()
    }
}

#[cfg(test)]
mod test {
    use std::*;
    use std::{boxed::*, vec::Vec, string::String};

    use crate::*;

    struct TreeNode {
        val: usize,
        next: Option<Box<TreeNode>>
    }
    impl TreeNode {
        fn new(count: usize) -> Self {
            if count > 0 {
                Self {val: count, next: Some(Box::new(Self::new(count-1)))}
            } else {
                Self {val: 0, next: None}
            }
        }
        fn traverse(&mut self) -> Option<&mut Self> {
            self.next.as_mut().map(|boxed| &mut **boxed)
        }
    }

    #[test]
    fn basics() {
        let mut tree = TreeNode::new(10);
        let mut node_stack = MutCursor::<TreeNode, 7>::new(&mut tree);

        while node_stack.advance(|node| {
            node.traverse()
        }) {}

        assert_eq!(node_stack.top().val, 0);
        assert_eq!(node_stack.depth(), 6);

        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 1);
        assert_eq!(node_stack.depth(), 5);

        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 4);
        assert_eq!(node_stack.depth(), 2);

        while node_stack.advance(|node| {
            node.traverse()
        }) {}
        assert_eq!(node_stack.top().val, 0);
        assert_eq!(node_stack.depth(), 6);

        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 6);
        assert_eq!(node_stack.depth(), 0);

        assert_eq!(node_stack.into_mut().val, 6);
    }

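    // Sketch: advancing more levels than the cursor's capacity. The ring buffer keeps
    // only the most recent `N` references, so the reported depth is capped at `N - 1`.
    #[test]
    fn depth_is_capped_at_capacity() {
        let mut tree = TreeNode::new(5);
        let mut node_stack = MutCursor::<TreeNode, 3>::new(&mut tree);

        while node_stack.advance(|node| node.traverse()) {}

        // Five levels were traversed, but only `N - 1 == 2` of them can be unwound.
        assert_eq!(node_stack.top().val, 0);
        assert_eq!(node_stack.depth(), 2);
        assert_eq!(node_stack.capacity(), 3);

        node_stack.backtrack();
        node_stack.backtrack();
        assert_eq!(node_stack.top().val, 2);
        assert_eq!(node_stack.depth(), 0);
    }
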
    #[test]
    fn try_to_escape_map_closure() {

        let mut tree = TreeNode::new(3);

        let node_stack = MutCursor::<TreeNode, 1>::new(&mut tree);

        let mut _poison: &mut TreeNode;
        match node_stack.try_map_into_mut(|node| -> Result<&mut TreeNode, &mut TreeNode> {
            Ok(node)
        }) {
            Ok(_r) => {},
            Err(_e) => {}
        }
    }

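    // Sketch: exercise the `Err` path of `try_map_into_mut`; the cursor is handed back
    // together with the error, so the traversal can continue from where it left off.
    #[test]
    fn try_map_into_mut_returns_cursor_on_err() {
        let mut tree = TreeNode::new(3);
        let node_stack = MutCursor::<TreeNode, 2>::new(&mut tree);

        match node_stack.try_map_into_mut(|node| -> Result<&mut TreeNode, usize> {
            if node.val == 0 { Ok(node) } else { Err(node.val) }
        }) {
            Ok(_) => panic!("expected the error path"),
            Err((mut recovered, val)) => {
                assert_eq!(val, 3);
                assert_eq!(recovered.depth(), 0);
                assert!(recovered.advance(|node| node.traverse()));
                assert_eq!(recovered.top().val, 2);
            }
        }
    }
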
    #[test]
    fn try_to_alias_mut_with_advance() {
        let mut x = 1;
        let mut c = MutCursor::<_, 1>::new(&mut x);
        #[allow(unused_mut)]
        let mut _r1: Option<&mut i32> = None;
        c.advance(|_r| {
            None
        });
        #[allow(unused_mut)]
        let mut _r2: Option<&mut i32> = None;
        c.advance(|_r| {
            None
        });
    }

    #[test]
    fn test_variance() {
        let mut dummy = String::new();
        let mut dummy_ref = &mut dummy;
        #[allow(unused_mut)]
        let mut _c = MutCursor::<_, 1>::new(&mut dummy_ref);
        {
            #[allow(unused_mut)]
            let mut _hello = String::from("hello!");
            println!("{}", dummy_ref);
        }
        println!("{}", dummy_ref);
    }

    use std::{thread, thread::ScopedJoinHandle};
    #[test]
    fn multi_thread_test() {

        let thread_cnt = 128;
        let mut data: Vec<TreeNode> = vec![];
        for _ in 0..thread_cnt {
            data.push(TreeNode::new(10));
        }
        let mut data_refs: Vec<&mut TreeNode> = data.iter_mut().collect();

        thread::scope(|scope| {

            let mut threads: Vec<ScopedJoinHandle<()>> = Vec::with_capacity(thread_cnt);

            for _ in 0..thread_cnt {
                let tree = data_refs.pop().unwrap();
                let mut node_stack = MutCursor::<TreeNode, 7>::new(tree);

                let thread = scope.spawn(move || {

                    while node_stack.advance(|node| {
                        node.traverse()
                    }) {}

                    assert_eq!(node_stack.top().val, 0);
                    assert_eq!(node_stack.depth(), 6);

                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 1);
                    assert_eq!(node_stack.depth(), 5);

                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 4);
                    assert_eq!(node_stack.depth(), 2);

                    while node_stack.advance(|node| {
                        node.traverse()
                    }) {}
                    assert_eq!(node_stack.top().val, 0);
                    assert_eq!(node_stack.depth(), 6);

                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    node_stack.backtrack();
                    assert_eq!(node_stack.top().val, 6);
                    assert_eq!(node_stack.depth(), 0);

                    assert_eq!(node_stack.into_mut().val, 6);
                });
                threads.push(thread);
            }

            for thread in threads {
                thread.join().unwrap();
            }
        });
    }
}