1use std::{cell::RefCell, collections::VecDeque, sync::Arc};
2
3use crate::sync::{AtomicU64, Mutex};
4use either::Either;
5use loro_common::{
6 ContainerID, Counter, CounterSpan, HasIdSpan, IdSpan, LoroError, LoroResult, LoroValue, PeerID,
7};
8use parking_lot::lock_api::ReentrantMutex;
9use rustc_hash::{FxHashMap, FxHashSet};
10use tracing::{debug_span, info_span, instrument};
11
12use crate::{
13 change::{get_sys_timestamp, Timestamp},
14 cursor::{AbsolutePosition, Cursor},
15 delta::TreeExternalDiff,
16 event::{Diff, EventTriggerKind},
17 version::Frontiers,
18 ContainerDiff, DiffEvent, DocDiff, LoroDoc, Subscription,
19};
20
/// A set of container diffs keyed by container ID, with the insertion
/// order of containers preserved separately in `order`.
#[derive(Debug, Clone, Default)]
pub struct DiffBatch {
    /// Diff accumulated per container.
    pub cid_to_events: FxHashMap<ContainerID, Diff>,
    /// Container IDs in the order they were first inserted.
    pub order: Vec<ContainerID>,
}
29
30impl DiffBatch {
31 pub fn new(diff: Vec<DocDiff>) -> Self {
32 let mut map: FxHashMap<ContainerID, Diff> = Default::default();
33 let mut order: Vec<ContainerID> = Vec::with_capacity(diff.len());
34 for d in diff.into_iter() {
35 for item in d.diff.into_iter() {
36 let old = map.insert(item.id.clone(), item.diff);
37 assert!(old.is_none(), "Duplicate container ID in diff events");
38 order.push(item.id.clone());
39 }
40 }
41
42 Self {
43 cid_to_events: map,
44 order,
45 }
46 }
47
48 pub fn compose(&mut self, other: &Self) {
49 if other.cid_to_events.is_empty() {
50 return;
51 }
52
53 for (id, diff) in other.iter() {
54 if let Some(this_diff) = self.cid_to_events.get_mut(id) {
55 this_diff.compose_ref(diff);
56 } else {
57 self.cid_to_events.insert(id.clone(), diff.clone());
58 self.order.push(id.clone());
59 }
60 }
61 }
62
63 pub fn transform(&mut self, other: &Self, left_priority: bool) {
64 if other.cid_to_events.is_empty() || self.cid_to_events.is_empty() {
65 return;
66 }
67
68 for (idx, diff) in self.cid_to_events.iter_mut() {
69 if let Some(b_diff) = other.cid_to_events.get(idx) {
70 diff.transform(b_diff, left_priority);
71 }
72 }
73 }
74
75 pub fn clear(&mut self) {
76 self.cid_to_events.clear();
77 self.order.clear();
78 }
79
80 pub fn iter(&self) -> impl Iterator<Item = (&ContainerID, &Diff)> + '_ {
81 self.order
82 .iter()
83 .map(|cid| (cid, self.cid_to_events.get(cid).unwrap()))
84 }
85
86 #[allow(clippy::should_implement_trait)]
87 pub fn into_iter(self) -> impl Iterator<Item = (ContainerID, Diff)> {
88 let mut cid_to_events = self.cid_to_events;
89 self.order.into_iter().map(move |cid| {
90 let d = cid_to_events.remove(&cid).unwrap();
91 (cid, d)
92 })
93 }
94}
95
/// Adjust a stored cursor so it stays valid after remote changes.
///
/// Steps:
/// 1. Follow `container_remap` transitively in case the cursor's container
///    was replaced by a new container ID.
/// 2. If the cursor carries a concrete ID, shift its cached absolute
///    position through the remote diff recorded for that container.
/// 3. Re-resolve a fresh cursor at the shifted position via the container's
///    handler — only text/list/movable-list have positional cursors; the
///    remaining handler kinds are left untouched.
fn transform_cursor(
    cursor_with_pos: &mut CursorWithPos,
    remote_diff: &DiffBatch,
    doc: &LoroDoc,
    container_remap: &FxHashMap<ContainerID, ContainerID>,
) {
    let mut container_changed = false;
    let mut cid = &cursor_with_pos.cursor.container;
    // A container may have been remapped more than once; chase the chain.
    while let Some(new_cid) = container_remap.get(cid) {
        cid = new_cid;
        container_changed = true;
    }

    // A cursor without a concrete ID only references the container itself:
    // update the container reference (if it moved) and stop.
    if cursor_with_pos.cursor.id.is_none() {
        if container_changed {
            cursor_with_pos.cursor.container = cid.clone();
        }
        return;
    }

    // Shift the cached absolute position by the remote changes, if any.
    if let Some(diff) = remote_diff.cid_to_events.get(cid) {
        let new_pos = diff.transform_cursor(cursor_with_pos.pos.pos, false);
        cursor_with_pos.pos.pos = new_pos;
    };

    // Rebuild the cursor from the (possibly shifted) position. When the
    // handler cannot resolve a cursor there, the old cursor is kept as-is.
    let new_pos = cursor_with_pos.pos.pos;
    match doc.get_handler(cid.clone()).unwrap() {
        crate::handler::Handler::Text(h) => {
            let Some(new_cursor) = h.get_cursor_internal(new_pos, cursor_with_pos.pos.side, false)
            else {
                return;
            };

            cursor_with_pos.cursor = new_cursor;
        }
        crate::handler::Handler::List(h) => {
            let Some(new_cursor) = h.get_cursor(new_pos, cursor_with_pos.pos.side) else {
                return;
            };

            cursor_with_pos.cursor = new_cursor;
        }
        crate::handler::Handler::MovableList(h) => {
            let Some(new_cursor) = h.get_cursor(new_pos, cursor_with_pos.pos.side) else {
                return;
            };

            cursor_with_pos.cursor = new_cursor;
        }
        crate::handler::Handler::Map(_) => {}
        crate::handler::Handler::Tree(_) => {}
        crate::handler::Handler::Unknown(_) => {}
        #[cfg(feature = "counter")]
        crate::handler::Handler::Counter(_) => {}
    }
}
153
/// Tracks local edits on a [`LoroDoc`] and exposes undo/redo over them.
pub struct UndoManager {
    /// Peer whose local ops are undoable; updated on peer-id change.
    peer: Arc<AtomicU64>,
    /// Maps containers recreated during undo/redo to their replacement IDs.
    container_remap: Arc<Mutex<FxHashMap<ContainerID, ContainerID>>>,
    /// Shared mutable state; reentrant so event callbacks can lock it again
    /// while an undo/redo already holds the lock.
    inner: Arc<parking_lot::ReentrantMutex<RefCell<UndoManagerInner>>>,
    // The subscriptions are held only to keep their callbacks alive.
    _peer_id_change_sub: Subscription,
    _undo_sub: Subscription,
    doc: LoroDoc,
}
168
// Manual Debug impl: `doc` and the two subscription handles are not printed.
impl std::fmt::Debug for UndoManager {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("UndoManager")
            .field("peer", &self.peer)
            .field("container_remap", &self.container_remap)
            .field("inner", &self.inner)
            .finish()
    }
}
178
/// Direction of a history operation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UndoOrRedo {
    Undo,
    Redo,
}
184
185impl UndoOrRedo {
186 fn opposite(&self) -> UndoOrRedo {
187 match self {
188 Self::Undo => Self::Redo,
189 Self::Redo => Self::Undo,
190 }
191 }
192}
193
/// Hook invoked when a new item is pushed onto a stack. Receives the
/// direction, the op span the item covers, and the triggering event (if
/// any); returns the [`UndoItemMeta`] to store with the item.
pub type OnPush = Box<
    dyn for<'a> Fn(UndoOrRedo, CounterSpan, Option<DiffEvent<'a>>) -> UndoItemMeta + Send + Sync,
>;
/// Hook invoked when an item is popped and applied during undo/redo.
pub type OnPop = Box<dyn Fn(UndoOrRedo, CounterSpan, UndoItemMeta) + Send + Sync>;
200
/// Mutable state guarded by [`UndoManager`]'s reentrant lock.
struct UndoManagerInner {
    // Counter where the next local change is expected to start; `None`
    // after a checkout invalidates tracking.
    next_counter: Option<Counter>,
    undo_stack: Stack,
    redo_stack: Stack,
    // True while an undo/redo is being applied, so the resulting local
    // event is not re-recorded as a new checkpoint.
    processing_undo: bool,
    // Timestamp of the last non-merged push, for merge-interval coalescing.
    last_undo_time: i64,
    // Pushes within this window (ms) merge into the previous item.
    merge_interval_in_ms: i64,
    // Cap on undo items; oldest items are dropped beyond this.
    max_stack_size: usize,
    // Local events whose origin starts with any of these are not recorded.
    exclude_origin_prefixes: Vec<Box<str>>,
    // Cursors of the most recently popped item, reused for the inverse item.
    last_popped_selection: Option<Vec<CursorWithPos>>,
    on_push: Option<OnPush>,
    on_pop: Option<OnPop>,
    // Active grouping session started via `group_start`, if any.
    group: Option<UndoGroup>,
}
215
// Manual Debug impl: the `on_push`/`on_pop` callbacks and
// `last_popped_selection` are not printed.
impl std::fmt::Debug for UndoManagerInner {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("UndoManagerInner")
            .field("latest_counter", &self.next_counter)
            .field("undo_stack", &self.undo_stack)
            .field("redo_stack", &self.redo_stack)
            .field("processing_undo", &self.processing_undo)
            .field("last_undo_time", &self.last_undo_time)
            .field("merge_interval", &self.merge_interval_in_ms)
            .field("max_stack_size", &self.max_stack_size)
            .field("exclude_origin_prefixes", &self.exclude_origin_prefixes)
            .field("group", &self.group)
            .finish()
    }
}
231
/// A user-defined grouping session: local edits from `start_counter`
/// onward are merged into a single undo item while the group is active.
#[derive(Debug, Clone, Default)]
struct UndoGroup {
    // Counter at which the group began.
    start_counter: Counter,
    // Containers touched by local edits while the group has been active.
    affected_cids: FxHashSet<ContainerID>,
}
237
238impl UndoGroup {
239 pub fn new(start_counter: Counter) -> Self {
240 Self {
241 start_counter,
242 affected_cids: Default::default(),
243 }
244 }
245}
246
/// An undo/redo stack segmented by remote activity.
///
/// Each segment pairs a queue of items with the batch of remote diffs that
/// arrived after those items were pushed; popping an item hands back that
/// batch so the item can be transformed against it. `size` is the total
/// number of items across all segments.
#[derive(Debug)]
struct Stack {
    stack: VecDeque<(VecDeque<StackItem>, Arc<Mutex<DiffBatch>>)>,
    size: usize,
}
252
/// One undoable unit: the span of local ops it covers plus user meta.
#[derive(Debug, Clone)]
struct StackItem {
    span: CounterSpan,
    meta: UndoItemMeta,
}
258
/// User-attachable metadata stored with each undo/redo item: an arbitrary
/// value plus cursors to restore when the item is popped.
#[derive(Debug, Default, Clone)]
pub struct UndoItemMeta {
    pub value: LoroValue,
    pub cursors: Vec<CursorWithPos>,
}
268
/// A cursor together with the absolute position it had when recorded.
#[derive(Debug, Clone)]
pub struct CursorWithPos {
    pub cursor: Cursor,
    pub pos: AbsolutePosition,
}
274
275impl UndoItemMeta {
276 pub fn new() -> Self {
277 Self {
278 value: LoroValue::Null,
279 cursors: Default::default(),
280 }
281 }
282
283 pub fn add_cursor(&mut self, cursor: &Cursor) {
288 self.cursors.push(CursorWithPos {
289 cursor: cursor.clone(),
290 pos: AbsolutePosition {
291 pos: cursor.origin_pos,
292 side: cursor.side,
293 },
294 });
295 }
296
297 pub fn set_value(&mut self, value: LoroValue) {
298 self.value = value;
299 }
300}
301
impl Stack {
    /// Create a stack with a single empty segment and no items.
    pub fn new() -> Self {
        let mut stack = VecDeque::new();
        stack.push_back((VecDeque::new(), Arc::new(Mutex::new(Default::default()))));
        Stack { stack, size: 0 }
    }

    /// Clone of the top-most item's meta, if the stack has any items.
    fn peek_top_meta(&self) -> Option<UndoItemMeta> {
        if self.is_empty() {
            return None;
        }

        // Scan segments from newest to oldest; the first non-empty one
        // holds the top item.
        for (items, _) in self.stack.iter().rev() {
            if let Some(item) = items.back() {
                return Some(item.meta.clone());
            }
        }

        None
    }

    /// Pop the top item together with the remote-diff batch accumulated
    /// since that item was pushed.
    ///
    /// Empty trailing segments are removed first; their remote diffs are
    /// composed into the previous segment so no remote change is lost.
    pub fn pop(&mut self) -> Option<(StackItem, Arc<Mutex<DiffBatch>>)> {
        while self.stack.back().unwrap().0.is_empty() && self.stack.len() > 1 {
            let (_, diff) = self.stack.pop_back().unwrap();
            let diff = diff.lock();
            if !diff.cid_to_events.is_empty() {
                self.stack.back_mut().unwrap().1.lock().compose(&diff);
            }
        }

        if self.stack.len() == 1 && self.stack.back().unwrap().0.is_empty() {
            // Nothing left to pop; drop any stale remote diff as well.
            self.stack.back_mut().unwrap().1.lock().clear();
            return None;
        }

        // At this point the last segment is guaranteed non-empty.
        self.size -= 1;
        let last = self.stack.back_mut().unwrap();
        last.0.pop_back().map(|x| (x, last.1.clone()))
    }

    /// Push an item without attempting to merge it with the previous one.
    pub fn push(&mut self, span: CounterSpan, meta: UndoItemMeta) {
        self.push_with_merge(span, meta, false, None)
    }

    /// Push an item, optionally merging it into the previous item when the
    /// two spans are contiguous.
    ///
    /// A fresh segment is started when remote changes arrived since the
    /// last push — unless the active `group` only touches containers that
    /// those remote changes left empty, in which case pushing into the
    /// current segment remains valid.
    pub fn push_with_merge(
        &mut self,
        span: CounterSpan,
        meta: UndoItemMeta,
        can_merge: bool,
        group: Option<&UndoGroup>,
    ) {
        let last = self.stack.back_mut().unwrap();
        let last_remote_diff = last.1.lock();

        // A group is "disjoint" when none of its affected containers has a
        // non-empty diff in the pending remote batch.
        let is_disjoint_group = group.is_some_and(|g| {
            g.affected_cids.iter().all(|cid| {
                last_remote_diff
                    .cid_to_events
                    .get(cid)
                    .is_none_or(|diff| diff.is_empty())
            })
        });

        let should_create_new_entry =
            !last_remote_diff.cid_to_events.is_empty() && !is_disjoint_group;

        if should_create_new_entry {
            // Remote ops intervened: start a new segment with its own
            // (empty) remote-diff accumulator.
            drop(last_remote_diff);
            let mut v = VecDeque::new();
            v.push_back(StackItem { span, meta });
            self.stack
                .push_back((v, Arc::new(Mutex::new(DiffBatch::default()))));
            self.size += 1;
            return;
        }

        if can_merge {
            if let Some(last_span) = last.0.back_mut() {
                if last_span.span.end == span.start {
                    // Contiguous spans: extend the previous item in place.
                    // Its existing meta is kept; the new `meta` is dropped.
                    last_span.span.end = span.end;
                    return;
                }
            }
        }

        self.size += 1;
        last.0.push_back(StackItem { span, meta });
    }

    /// Fold a remote event into the newest segment's remote-diff batch.
    pub fn compose_remote_event(&mut self, diff: &[&ContainerDiff]) {
        if self.is_empty() {
            return;
        }

        let remote_diff = &mut self.stack.back_mut().unwrap().1;
        let mut remote_diff = remote_diff.lock();
        for e in diff {
            if let Some(d) = remote_diff.cid_to_events.get_mut(&e.id) {
                d.compose_ref(&e.diff);
            } else {
                remote_diff
                    .cid_to_events
                    .insert(e.id.clone(), e.diff.clone());
                remote_diff.order.push(e.id.clone());
            }
        }
    }

    /// Transform the newest segment's remote diff against `diff` (the
    /// delta just produced by applying an undo/redo item).
    pub fn transform_based_on_this_delta(&mut self, diff: &DiffBatch) {
        if self.is_empty() {
            return;
        }
        let remote_diff = &mut self.stack.back_mut().unwrap().1;
        remote_diff.lock().transform(diff, false);
    }

    /// Remove all items and reset to a single empty segment.
    pub fn clear(&mut self) {
        self.stack = VecDeque::new();
        self.stack.push_back((VecDeque::new(), Default::default()));
        self.size = 0;
    }

    pub fn is_empty(&self) -> bool {
        self.size == 0
    }

    /// Total number of items across all segments.
    pub fn len(&self) -> usize {
        self.size
    }

    /// Drop the oldest item (used to enforce the max stack size).
    fn pop_front(&mut self) {
        if self.is_empty() {
            return;
        }

        self.size -= 1;
        let first = self.stack.front_mut().unwrap();
        let f = first.0.pop_front();
        assert!(f.is_some());
        if first.0.is_empty() {
            self.stack.pop_front();
        }
    }

    /// Replace the meta of the newest segment's top item, if present.
    fn set_top_meta(&mut self, meta: UndoItemMeta) {
        let Some(top) = self.stack.back_mut() else {
            return;
        };
        let Some(last) = top.0.back_mut() else {
            return;
        };
        last.meta = meta;
    }
}
469
470impl Default for Stack {
471 fn default() -> Self {
472 Stack::new()
473 }
474}
475
impl UndoManagerInner {
    /// Initialize tracking at `last_counter` with empty stacks and defaults
    /// (no merge interval, unlimited stack size, no hooks, no group).
    fn new(last_counter: Counter) -> Self {
        Self {
            next_counter: Some(last_counter),
            undo_stack: Default::default(),
            redo_stack: Default::default(),
            processing_undo: false,
            merge_interval_in_ms: 0,
            last_undo_time: 0,
            max_stack_size: usize::MAX,
            exclude_origin_prefixes: vec![],
            last_popped_selection: None,
            on_pop: None,
            on_push: None,
            group: None,
        }
    }

    /// True when a group is active and none of its affected containers
    /// appear in `diff`. Returns false when no group is active.
    fn is_disjoint_with_group(&self, diff: &[&ContainerDiff]) -> bool {
        let Some(group) = &self.group else {
            return false;
        };

        diff.iter().all(|d| !group.affected_cids.contains(&d.id))
    }

    /// Record the local ops in `[next_counter, latest_counter)` as a new
    /// undo item (or merge them into the previous one).
    ///
    /// Takes the `RefCell` rather than `&mut self` so each step can scope
    /// its borrow narrowly around calls into user hooks.
    fn record_checkpoint(this: &RefCell<Self>, latest_counter: Counter, event: Option<DiffEvent>) {
        let previous_counter = this.borrow().next_counter;

        // No new ops since the last checkpoint: nothing to record.
        if Some(latest_counter) == this.borrow().next_counter {
            return;
        }

        // Tracking was invalidated (next_counter is None after a checkout);
        // just resume from the current counter.
        if this.borrow().next_counter.is_none() {
            this.borrow_mut().next_counter = Some(latest_counter);
            return;
        }

        // While a group is active, remember every container this event
        // touched so later pushes/imports can test for disjointness.
        if let Some(group) = &mut this.borrow_mut().group {
            event.iter().for_each(|e| {
                e.events.iter().for_each(|e| {
                    group.affected_cids.insert(e.id.clone());
                })
            });
        }

        let now = get_sys_timestamp() as Timestamp;
        let span = CounterSpan::new(this.borrow().next_counter.unwrap(), latest_counter);
        // Let the user hook build the meta (value + cursors) for this item.
        let meta = this
            .borrow()
            .on_push
            .as_ref()
            .map(|x| x(UndoOrRedo::Undo, span, event))
            .unwrap_or_default();

        let mut this = this.borrow_mut();
        let this: &mut Self = &mut this;
        let in_merge_interval = now - this.last_undo_time < this.merge_interval_in_ms;

        // An active group forces merging once it has progressed past its
        // start counter (i.e. the group's first push is a normal push).
        let group_should_merge = this.group.is_some()
            && match (
                previous_counter,
                this.group.as_ref().map(|g| g.start_counter),
            ) {
                (Some(previous), Some(active)) => previous != active,
                _ => true,
            };

        let should_merge = !this.undo_stack.is_empty() && (in_merge_interval || group_should_merge);

        if should_merge {
            this.undo_stack
                .push_with_merge(span, meta, true, this.group.as_ref());
        } else {
            // Only a non-merged push resets the merge-interval clock.
            this.last_undo_time = now;
            this.undo_stack.push(span, meta);
        }

        this.next_counter = Some(latest_counter);
        // Any new local edit invalidates the redo history.
        this.redo_stack.clear();
        // Enforce the size cap by dropping the oldest items.
        while this.undo_stack.len() > this.max_stack_size {
            this.undo_stack.pop_front();
        }
    }
}
566
567fn get_counter_end(doc: &LoroDoc, peer: PeerID) -> Counter {
568 doc.oplog().lock().vv().get(&peer).cloned().unwrap_or(0)
569}
570
impl UndoManager {
    /// Create an undo manager bound to `doc`.
    ///
    /// Installs two subscriptions:
    /// * a root-event subscription that records local checkpoints, folds
    ///   import/excluded events into the stacks as remote diffs, and clears
    ///   everything on checkout;
    /// * a peer-id-change subscription that resets the stacks for the new
    ///   peer identity.
    pub fn new(doc: &LoroDoc) -> Self {
        let peer = Arc::new(AtomicU64::new(doc.peer_id()));
        let peer_clone = peer.clone();
        let peer_clone2 = peer.clone();
        let inner = Arc::new(ReentrantMutex::new(RefCell::new(UndoManagerInner::new(
            get_counter_end(doc, doc.peer_id()),
        ))));
        let inner_clone = inner.clone();
        let inner_clone2 = inner.clone();
        let remap_containers = Arc::new(Mutex::new(FxHashMap::default()));
        let remap_containers_clone = remap_containers.clone();
        let undo_sub = doc.subscribe_root(Arc::new(move |event| match event.event_meta.by {
            EventTriggerKind::Local => {
                let lock = inner_clone.lock();
                // Ignore events generated by our own undo/redo application.
                if lock.borrow().processing_undo {
                    return;
                }
                if let Some(id) = event
                    .event_meta
                    .to
                    .iter()
                    .find(|x| x.peer == peer_clone.load(std::sync::atomic::Ordering::Relaxed))
                {
                    let should_exclude = lock
                        .borrow()
                        .exclude_origin_prefixes
                        .iter()
                        .any(|x| event.event_meta.origin.starts_with(&**x));
                    if should_exclude {
                        // Excluded origins are treated like remote changes:
                        // not undoable themselves, but the stacks must still
                        // account for them when transforming later.
                        let mut inner = lock.borrow_mut();
                        inner.undo_stack.compose_remote_event(event.events);
                        inner.redo_stack.compose_remote_event(event.events);
                        inner.next_counter = Some(id.counter + 1);
                    } else {
                        UndoManagerInner::record_checkpoint(&lock, id.counter + 1, Some(event));
                    }
                }
            }
            EventTriggerKind::Import => {
                let lock = inner_clone.lock();
                let mut inner = lock.borrow_mut();

                for e in event.events {
                    if let Diff::Tree(tree) = &e.diff {
                        for item in &tree.diff {
                            let target = item.target;
                            if let TreeExternalDiff::Create { .. } = &item.action {
                                // The node's meta container exists again, so
                                // stop remapping it to a replacement.
                                remap_containers_clone
                                    .lock()
                                    .remove(&target.associated_meta_container());
                            }
                        }
                    }
                }

                let is_import_disjoint = inner.is_disjoint_with_group(event.events);

                inner.undo_stack.compose_remote_event(event.events);
                inner.redo_stack.compose_remote_event(event.events);

                // An import that touches the grouped containers ends the group.
                if !is_import_disjoint {
                    inner.group = None;
                }
            }
            EventTriggerKind::Checkout => {
                // A checkout rewrites history: both stacks become invalid.
                let lock = inner_clone.lock();
                let mut inner = lock.borrow_mut();
                inner.undo_stack.clear();
                inner.redo_stack.clear();
                inner.next_counter = None;
            }
        }));

        let sub = doc.subscribe_peer_id_change(Box::new(move |id| {
            // Old spans no longer belong to the new peer id; start over.
            let lock = inner_clone2.lock();
            let mut inner = lock.borrow_mut();
            inner.undo_stack.clear();
            inner.redo_stack.clear();
            inner.next_counter = Some(id.counter);
            peer_clone2.store(id.peer, std::sync::atomic::Ordering::Relaxed);
            true
        }));

        UndoManager {
            peer,
            container_remap: remap_containers,
            inner,
            _peer_id_change_sub: sub,
            _undo_sub: undo_sub,
            doc: doc.clone(),
        }
    }

    /// Begin merging subsequent local edits into a single undo item.
    ///
    /// # Errors
    /// Fails if a group is already active, or if the manager has no valid
    /// counter yet (e.g. right after a checkout).
    pub fn group_start(&self) -> LoroResult<()> {
        let lock = self.inner.lock();
        let mut inner = lock.borrow_mut();

        if inner.group.is_some() {
            return Err(LoroError::UndoGroupAlreadyStarted);
        }

        inner.group =
            Some(UndoGroup::new(inner.next_counter.ok_or_else(|| {
                LoroError::Unknown("UndoManager is not ready".into())
            })?));

        Ok(())
    }

    /// End the active grouping session (no-op if none is active).
    pub fn group_end(&self) {
        self.inner.lock().borrow_mut().group = None;
    }

    /// The peer id whose local edits this manager tracks.
    pub fn peer(&self) -> PeerID {
        self.peer.load(std::sync::atomic::Ordering::Relaxed)
    }

    /// Local edits recorded closer together than `interval` ms are merged
    /// into a single undo item.
    pub fn set_merge_interval(&self, interval: i64) {
        self.inner.lock().borrow_mut().merge_interval_in_ms = interval;
    }

    /// Cap the number of undo items; the oldest are discarded first.
    pub fn set_max_undo_steps(&self, size: usize) {
        self.inner.lock().borrow_mut().max_stack_size = size;
    }

    /// Local events whose origin starts with `prefix` will not be recorded
    /// as undoable; they are folded in like remote changes instead.
    pub fn add_exclude_origin_prefix(&self, prefix: &str) {
        self.inner
            .lock()
            .borrow_mut()
            .exclude_origin_prefixes
            .push(prefix.into());
    }

    /// Record any pending local ops as a checkpoint right now.
    pub fn record_new_checkpoint(&self) -> LoroResult<()> {
        // NOTE(review): assumes `with_barrier` commits pending local
        // transactions so the counter read below is up to date — confirm.
        self.doc.with_barrier(|| {});
        let counter = get_counter_end(&self.doc, self.peer());
        UndoManagerInner::record_checkpoint(&self.inner.lock(), counter, None);
        Ok(())
    }

    /// Undo the most recent local change.
    /// Returns `Ok(true)` if anything was actually undone.
    #[instrument(skip_all)]
    pub fn undo(&self) -> LoroResult<bool> {
        self.perform(
            |x| &mut x.undo_stack,
            |x| &mut x.redo_stack,
            UndoOrRedo::Undo,
        )
    }

    /// Redo the most recently undone change.
    /// Returns `Ok(true)` if anything was actually redone.
    #[instrument(skip_all)]
    pub fn redo(&self) -> LoroResult<bool> {
        self.perform(
            |x| &mut x.redo_stack,
            |x| &mut x.undo_stack,
            UndoOrRedo::Redo,
        )
    }

    /// Shared implementation of [`Self::undo`] / [`Self::redo`].
    ///
    /// Pops items from `get_stack` until one actually changes the document.
    /// Each item is applied through `LoroDoc::undo_internal` together with
    /// the remote diff accumulated since it was pushed; the resulting
    /// inverse span is pushed onto `get_opposite`.
    fn perform(
        &self,
        get_stack: impl Fn(&mut UndoManagerInner) -> &mut Stack,
        get_opposite: impl Fn(&mut UndoManagerInner) -> &mut Stack,
        kind: UndoOrRedo,
    ) -> LoroResult<bool> {
        let doc = &self.doc.clone();
        // Close out any pending local ops so they join the undo history.
        self.record_new_checkpoint()?;
        let end_counter = get_counter_end(doc, self.peer());
        let mut top = {
            let lock = self.inner.lock();
            let mut inner = lock.borrow_mut();
            // Suppress checkpoint recording for the events we are about to
            // generate; reset at the end of this function.
            inner.processing_undo = true;
            get_stack(&mut inner).pop()
        };

        let mut executed = false;
        while let Some((mut span, remote_diff)) = top {
            let mut next_push_selection = None;
            {
                let inner = self.inner.clone();
                let remote_change_clone = remote_diff.lock().clone();
                let commit = doc.undo_internal(
                    IdSpan {
                        peer: self.peer(),
                        counter: span.span,
                    },
                    &mut self.container_remap.lock(),
                    Some(&remote_change_clone),
                    &mut |diff| {
                        // Keep the stack's pending remote diff consistent
                        // with the delta the undo just produced.
                        info_span!("transform remote diff").in_scope(|| {
                            let inner = inner.lock();
                            get_stack(&mut inner.borrow_mut()).transform_based_on_this_delta(diff);
                        });
                    },
                )?;
                drop(commit);
                let inner = self.inner.lock();
                let mut is_some = false;

                if let Some(on_pop) = inner.borrow().on_pop.as_ref() {
                    is_some = true;
                    // Cursors were captured before the remote changes; map
                    // them into the current document state before reporting.
                    for cursor in span.meta.cursors.iter_mut() {
                        transform_cursor(
                            cursor,
                            &remote_diff.lock(),
                            doc,
                            &self.container_remap.lock(),
                        );
                    }

                    on_pop(kind, span.span, span.meta.clone());
                }
                if is_some {
                    // Swap selections: the previously popped selection goes
                    // onto the inverse item pushed below, and the current
                    // one is remembered for the next opposite operation.
                    let take = inner.borrow_mut().last_popped_selection.take();
                    next_push_selection = take;
                    inner.borrow_mut().last_popped_selection = Some(span.meta.cursors);
                }
            }
            let new_counter = get_counter_end(doc, self.peer());
            if end_counter != new_counter {
                // The item produced real ops: push the inverse span onto
                // the opposite stack and stop.
                let inner = self.inner.lock();
                let mut meta = inner
                    .borrow()
                    .on_push
                    .as_ref()
                    .map(|x| {
                        x(
                            kind.opposite(),
                            CounterSpan::new(end_counter, new_counter),
                            None,
                        )
                    })
                    .unwrap_or_default();

                // For the first undo (empty redo stack) keep the cursors the
                // on_push hook captured; otherwise reuse the selection that
                // was last popped.
                if matches!(kind, UndoOrRedo::Undo)
                    && get_opposite(&mut inner.borrow_mut()).is_empty()
                {
                } else if let Some(inner) = next_push_selection.take() {
                    meta.cursors = inner;
                }

                get_opposite(&mut inner.borrow_mut())
                    .push(CounterSpan::new(end_counter, new_counter), meta);
                inner.borrow_mut().next_counter = Some(new_counter);
                executed = true;
                break;
            } else {
                // The item was a no-op on the current state (cancelled out
                // by remote changes); try the next one.
                top = get_stack(&mut self.inner.lock().borrow_mut()).pop();
                continue;
            }
        }

        self.inner.lock().borrow_mut().processing_undo = false;
        Ok(executed)
    }

    /// Whether there is anything to undo.
    pub fn can_undo(&self) -> bool {
        !self.inner.lock().borrow().undo_stack.is_empty()
    }

    /// Whether there is anything to redo.
    pub fn can_redo(&self) -> bool {
        !self.inner.lock().borrow().redo_stack.is_empty()
    }

    /// Number of items on the undo stack.
    pub fn undo_count(&self) -> usize {
        self.inner.lock().borrow().undo_stack.len()
    }

    /// Number of items on the redo stack.
    pub fn redo_count(&self) -> usize {
        self.inner.lock().borrow().redo_stack.len()
    }

    /// Meta of the top undo item, if any.
    pub fn top_undo_meta(&self) -> Option<UndoItemMeta> {
        self.inner.lock().borrow().undo_stack.peek_top_meta()
    }

    /// Meta of the top redo item, if any.
    pub fn top_redo_meta(&self) -> Option<UndoItemMeta> {
        self.inner.lock().borrow().redo_stack.peek_top_meta()
    }

    /// User value stored with the top undo item, if any.
    pub fn top_undo_value(&self) -> Option<LoroValue> {
        self.top_undo_meta().map(|m| m.value)
    }

    /// User value stored with the top redo item, if any.
    pub fn top_redo_value(&self) -> Option<LoroValue> {
        self.top_redo_meta().map(|m| m.value)
    }

    /// Install (or clear) the hook invoked when an item is pushed.
    pub fn set_on_push(&self, on_push: Option<OnPush>) {
        self.inner.lock().borrow_mut().on_push = on_push;
    }

    /// Install (or clear) the hook invoked when an item is popped.
    pub fn set_on_pop(&self, on_pop: Option<OnPop>) {
        self.inner.lock().borrow_mut().on_pop = on_pop;
    }

    /// Clear both the undo and redo stacks.
    pub fn clear(&self) {
        self.inner.lock().borrow_mut().undo_stack.clear();
        self.inner.lock().borrow_mut().redo_stack.clear();
    }

    /// Clear only the redo stack.
    pub fn clear_redo(&self) {
        self.inner.lock().borrow_mut().redo_stack.clear();
    }

    /// Clear only the undo stack.
    pub fn clear_undo(&self) {
        self.inner.lock().borrow_mut().undo_stack.clear();
    }

    /// Replace the meta of the top undo item (no-op if the stack is empty).
    pub fn set_top_undo_meta(&self, meta: UndoItemMeta) {
        self.inner.lock().borrow_mut().undo_stack.set_top_meta(meta);
    }

    /// Replace the meta of the top redo item (no-op if the stack is empty).
    pub fn set_top_redo_meta(&self, meta: UndoItemMeta) {
        self.inner.lock().borrow_mut().redo_stack.set_top_meta(meta);
    }
}
966
/// Compute the cumulative diff that undoes `spans`, transformed past
/// everything that happened after them.
///
/// For each span i (oldest → newest), mechanically:
///   A_i = calc_diff(span_i.last, deps_i)         — the raw "undo span i" diff;
///   B_i = calc_diff(span_i.last, next boundary)  — what happened after span i
///         (for the last span, the boundary is either `last_frontiers` or the
///         pre-computed batch on the `Right` side);
///   A'_i = A_i ∘ transform(C_{i-1}, A_i);
///   C_i  = transform(A'_i, B_i).
/// `on_last_event_a` observes A' of the newest span before its final
/// transform, and C of the newest span is returned.
///
/// # Panics
/// Panics if `spans` is empty (`last_ci` would be `None`).
pub(crate) fn undo(
    spans: Vec<(IdSpan, Frontiers)>,
    last_frontiers_or_last_bi: Either<&Frontiers, &DiffBatch>,
    calc_diff: impl Fn(&Frontiers, &Frontiers) -> DiffBatch,
    on_last_event_a: &mut dyn FnMut(&DiffBatch),
) -> DiffBatch {
    // C_{i-1}: the transformed undo effect carried over from the previous span.
    let mut last_ci: Option<DiffBatch> = None;
    for i in 0..spans.len() {
        debug_span!("Undo", ?i, "Undo span {:?}", &spans[i]).in_scope(|| {
            let (this_id_span, this_deps) = &spans[i];
            // A_i: diff from the span's last id back to its dependencies.
            let mut event_a_i = debug_span!("1. Calc event A_i").in_scope(|| {
                calc_diff(&this_id_span.id_last().into(), this_deps)
            });

            // B_i: diff from this span's end to the next span's end (or to
            // the provided final frontiers / pre-computed batch).
            let stack_diff_batch;
            let event_b_i = 'block: {
                let next = if i + 1 < spans.len() {
                    spans[i + 1].0.id_last().into()
                } else {
                    match last_frontiers_or_last_bi {
                        Either::Left(last_frontiers) => last_frontiers.clone(),
                        Either::Right(right) => break 'block right,
                    }
                };
                stack_diff_batch = Some(calc_diff(&this_id_span.id_last().into(), &next));
                stack_diff_batch.as_ref().unwrap()
            };

            // A'_i: fold the previous carry C_{i-1} into A_i after
            // transforming it over A_i (A_i wins ties).
            let mut event_a_prime = if let Some(mut last_ci) = last_ci.take() {
                last_ci.transform(&event_a_i, true);

                event_a_i.compose(&last_ci);
                event_a_i
            } else {
                event_a_i
            };
            if i == spans.len() - 1 {
                on_last_event_a(&event_a_prime);
            }
            // C_i: carry A'_i past everything that happened after the span.
            event_a_prime.transform(event_b_i, true);

            let c_i = event_a_prime;
            last_ci = Some(c_i);
        });
    }

    last_ci.unwrap()
}