//! jmbl 0.5.0
//!
//! A high performance CRDT.
//!
//! # Documentation
use std::cmp::Ordering;

use crate::{
    mem_use::{MemUsage, MemUser},
    ops::OpKind,
    sort::only_in_first_sorted_list,
};

use super::{
    ops::{Op, OpID},
    sort::{noncausal_sort, search_from_back},
};

/// Per-operation undo/redo bookkeeping.
///
/// Maps the ID of a target operation to the list of undo/redo `Op`s applied
/// against it, kept sorted by `noncausal_sort` (see `insert`). Backed by
/// persistent `im` collections, so updates return a new state rather than
/// mutating in place.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]

pub(crate) struct UndoRedoState {
    undo_redos: im::HashMap<OpID, im::Vector<Op>>,
}

impl UndoRedoState {
    /// Creates an empty state with no undo/redo history.
    pub fn new() -> Self {
        UndoRedoState {
            undo_redos: Default::default(),
        }
    }

    /// Returns a new state with `op` recorded against the operation it
    /// targets (`op.prev`). The per-target list is kept sorted by
    /// `noncausal_sort`; an op that is already present is not inserted twice.
    pub fn insert(&self, op: Op) -> Self {
        // Capture the key before moving `op` into the closure. `OpID` is
        // `Copy` (it is passed by value throughout this module), so this is a
        // cheap copy and lets us insert `op` itself instead of cloning it.
        let key = op.prev;
        let new_undo_redos = self.undo_redos.alter(
            |maybe_undo_redo_list| {
                let mut undo_redo_list = maybe_undo_redo_list.unwrap_or_default();
                match search_from_back(&undo_redo_list, &op, noncausal_sort) {
                    // Already present: keep the list unchanged.
                    Ok(_exists) => Some(undo_redo_list),
                    // Not present: splice in at the sorted position.
                    Err(insertion_position) => {
                        undo_redo_list.insert(insertion_position, op);
                        Some(undo_redo_list)
                    }
                }
            },
            key,
        );

        UndoRedoState {
            undo_redos: new_undo_redos,
        }
    }

    /// Returns true if the most recent undo/redo recorded against `op_hash`
    /// is an `Undo`, i.e. the target operation is currently undone.
    /// An empty or missing list means the op has never been undone.
    pub fn is_undone(&self, op_hash: OpID) -> bool {
        self.undo_redos
            .get(&op_hash)
            .and_then(|list| list.last())
            .map_or(false, |last| matches!(last.kind, OpKind::Undo))
    }

    /// Returns true if `op` sorts after every undo/redo already recorded
    /// against its target, i.e. applying it would make it the newest entry.
    /// Vacuously true when no undo/redos exist for the target.
    pub fn _is_undo_redo_newest(&self, op: &Op) -> bool {
        // Compare against the current last entry by reference; no clone needed.
        match self.undo_redos.get(&op.prev).and_then(|list| list.last()) {
            Some(last) => noncausal_sort(last, op) == Ordering::Less,
            None => true,
        }
    }

    /// Returns true if applying `op` would change visible state: it must be
    /// the newest undo/redo for its target, and it must flip the target's
    /// current undone/redone status (an `Undo` of an already-undone op, or a
    /// `Redo` of a not-undone op, is a no-op).
    pub fn _would_have_effect(&self, op: &Op) -> bool {
        if !self._is_undo_redo_newest(op) {
            return false;
        }
        let currently_undone = self.is_undone(op.prev);
        !((matches!(op.kind, OpKind::Undo) && currently_undone)
            || (matches!(op.kind, OpKind::Redo) && !currently_undone))
    }

    /// Returns every undo/redo op present in `self` but absent from
    /// `maybe_other`. With `None`, all recorded ops are returned.
    pub fn ops_since(&self, maybe_other: Option<&Self>) -> Vec<Op> {
        let mut missing_undo_redos = vec![];
        let other_undo_redos = maybe_other.map(|other| &other.undo_redos);

        for (undone_hash, ops) in &self.undo_redos {
            // `Option<&HashMap<..>>` is `Copy`, so no `.as_ref()` is needed.
            match other_undo_redos.and_then(|other_map| other_map.get(undone_hash)) {
                // Other side has a list for this target: diff the sorted lists.
                Some(list_in_other_map) => missing_undo_redos.extend(
                    only_in_first_sorted_list(ops, list_in_other_map, noncausal_sort),
                ),
                // Other side has never seen this target: everything is missing.
                None => missing_undo_redos.extend(ops.iter().cloned()),
            }
        }

        missing_undo_redos
    }

    /// Iterates over the IDs of all operations that have undo/redo history.
    pub fn keys(&self) -> impl Iterator<Item = &OpID> {
        self.undo_redos.keys()
    }
}

impl MemUser for UndoRedoState {
    /// Reports memory usage by delegating to the inner undo/redo map.
    fn mem_use(&self) -> MemUsage {
        self.undo_redos.mem_use()
    }
}