use std;
use std::borrow::Cow;
use std::collections::hash_map::DefaultHasher;
use std::collections::BTreeSet;
use crate::delta::{Delta, InsertDelta};
use crate::interval::Interval;
use crate::multiset::{CountMatcher, Subset};
use crate::rope::{Rope, RopeInfo};
/// Represents the current state of a document together with its full edit
/// history (every revision ever applied, including undone ones).
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Engine {
    /// The session id used to mint `RevId`s for edits made on this engine
    /// (see `next_rev_id`). Not serialized; restored via `default_session`.
    #[cfg_attr(feature = "serde", serde(default = "default_session", skip_serializing))]
    session: SessionId,
    /// Monotonic counter for the `num` field of `RevId`s minted in this session.
    #[cfg_attr(feature = "serde", serde(default = "initial_revision_counter", skip_serializing))]
    rev_id_counter: u32,
    /// The currently visible contents of the document.
    text: Rope,
    /// Characters that are currently deleted/hidden but may come back via
    /// undo/redo; `shuffle` moves characters between `text` and here.
    tombstones: Rope,
    /// Subset of the "union string" (all characters ever inserted, in place)
    /// marking which characters are currently deleted, i.e. live in
    /// `tombstones` rather than `text`.
    deletes_from_union: Subset,
    /// The set of undo-group ids that are currently undone.
    undone_groups: BTreeSet<usize>,
    /// The revision history; never empty (`Engine::empty` seeds one revision).
    revs: Vec<Revision>,
}
/// Globally unique identifier of a revision: the minting session plus a
/// per-session sequence number.
#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct RevId {
    // (session1, session2) together form the SessionId (see `session_id`).
    // session1 == session2 == 0 is used by `Engine::empty` for the shared
    // initial revision, so every real session must differ from (0, 0).
    session1: u64,
    session2: u32,
    /// Sequence number within the session, taken from `rev_id_counter`.
    num: u32,
}
/// One entry in an `Engine`'s history.
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
struct Revision {
    /// Identity of this revision; stable across rebase/merge between engines.
    rev_id: RevId,
    /// The largest undo group id used by any revision up to and including
    /// this one; lets `find_first_undo_candidate_index` stop scanning early.
    max_undo_so_far: usize,
    /// What this revision did: an edit or a change of undone groups.
    edit: Contents,
}
/// Lightweight handle for a revision: the hash of its `RevId` (see
/// `RevId::token` and `Engine::find_rev_token`).
/// NOTE(review): lookups compare tokens only — assumes hash collisions are
/// negligible in practice.
pub type RevToken = u64;
/// Identifies an editing session; its two components become the
/// `session1`/`session2` fields of every `RevId` minted in that session.
pub type SessionId = (u64, u32);
/// Errors returned by the fallible `Engine` entry points.
#[derive(Clone)]
pub enum Error {
    /// The referenced revision token was not found in the history
    /// (it may have been mis-specified, or garbage collected).
    MissingRevision(RevToken),
    /// The delta's `base_len` did not match the length of the text as of the
    /// revision it was applied against (checked in `mk_new_rev`).
    MalformedDelta { rev_len: usize, delta_len: usize },
}
/// Total order used to deterministically place concurrent inserts: the
/// user-supplied priority first, session id as tie-breaker (used by
/// `mk_new_rev` and `rebase`). Field order matters: the derived `Ord`
/// compares `priority` before `session_id`.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
struct FullPriority {
    priority: usize,
    session_id: SessionId,
}
use self::Contents::*;
/// What a `Revision` did to the document.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
enum Contents {
    /// A regular edit: characters inserted into and deleted from the union string.
    Edit {
        /// Used to order concurrent inserts at the same position (see `FullPriority`).
        priority: usize,
        /// Groups related edits so they are undone/redone together.
        undo_group: usize,
        /// Subset of the union string (as of after this revision) that this
        /// revision inserted.
        inserts: Subset,
        /// Subset of the union string (as of after this revision) that this
        /// revision deleted.
        deletes: Subset,
    },
    /// A change to the set of currently-undone groups (see `Engine::undo`).
    Undo {
        /// Groups toggled between done and undone: the symmetric difference
        /// (XOR) of the old and new undone sets.
        toggled_groups: BTreeSet<usize>,
        /// Reversible diff of `deletes_from_union`: old XOR new, so applying
        /// `bitxor` again inverts the undo (see `deletes_from_union_before_index`).
        deletes_bitxor: Subset,
    },
}
/// Session id assumed when none is recorded: used by `Engine::empty` and as
/// the serde default for the non-serialized `session` field.
/// (1, 0) is deliberately distinct from the reserved (0, 0) of the initial revision.
fn default_session() -> (u64, u32) {
    let fallback: (u64, u32) = (1, 0);
    fallback
}
/// Revision counter assumed when deserializing an `Engine` (the field is not
/// serialized); matches the value a fresh `Engine::empty` starts with.
#[cfg(feature = "serde")]
fn initial_revision_counter() -> u32 {
    1
}
impl RevId {
pub fn token(&self) -> RevToken {
use std::hash::{Hash, Hasher};
let mut hasher = DefaultHasher::new();
self.hash(&mut hasher);
hasher.finish()
}
pub fn session_id(&self) -> SessionId {
(self.session1, self.session2)
}
}
impl Engine {
/// Creates an engine whose history starts from `initial_contents`. Non-empty
/// contents are recorded as an ordinary first edit (priority 0, undo group 0)
/// against the empty initial revision.
pub fn new(initial_contents: Rope) -> Engine {
    let mut engine = Engine::empty();
    if initial_contents.is_empty() {
        return engine;
    }
    let base = engine.get_head_rev_id().token();
    let insert_all = Delta::simple_edit(Interval::new(0, 0), initial_contents, 0);
    engine.edit_rev(0, 0, base, insert_all);
    engine
}
/// Creates an engine for an empty document: one seed revision with the
/// reserved (0, 0) session so that all engines share the same initial
/// revision, and an empty union string.
pub fn empty() -> Engine {
    let empty_subset = Subset::new(0);
    let seed_rev = Revision {
        rev_id: RevId { session1: 0, session2: 0, num: 0 },
        max_undo_so_far: 0,
        // A no-op Undo: toggles nothing, XORs with an empty subset.
        edit: Undo { toggled_groups: BTreeSet::new(), deletes_bitxor: empty_subset.clone() },
    };
    Engine {
        session: default_session(),
        rev_id_counter: 1,
        text: Rope::default(),
        tombstones: Rope::default(),
        deletes_from_union: empty_subset,
        undone_groups: BTreeSet::new(),
        revs: vec![seed_rev],
    }
}
/// The id the next revision will carry; does not advance `rev_id_counter`
/// (the callers bump it when they commit).
fn next_rev_id(&self) -> RevId {
    let (session1, session2) = self.session;
    RevId { session1, session2, num: self.rev_id_counter }
}
fn find_rev(&self, rev_id: RevId) -> Option<usize> {
self.revs
.iter()
.enumerate()
.rev()
.find(|&(_, ref rev)| rev.rev_id == rev_id)
.map(|(i, _)| i)
}
fn find_rev_token(&self, rev_token: RevToken) -> Option<usize> {
self.revs
.iter()
.enumerate()
.rev()
.find(|&(_, ref rev)| rev.rev_id.token() == rev_token)
.map(|(i, _)| i)
}
/// The deleted subset of the union string as of the revision at `rev_index`,
/// with undos inverted (see `deletes_from_union_before_index`).
fn deletes_from_union_for_index(&self, rev_index: usize) -> Cow<Subset> {
    self.deletes_from_union_before_index(rev_index + 1, true)
}
/// Reconstructs `deletes_from_union` as it was just before `rev_index` by
/// walking backwards from the present and inverting the effect of each
/// revision at index >= `rev_index`.
///
/// If `invert_undos` is false, `Undo` revisions are skipped, which yields
/// the deletes relative to the *current* set of undone groups instead of the
/// historical one.
///
/// Returns `Cow::Borrowed` when nothing needed inverting.
fn deletes_from_union_before_index(&self, rev_index: usize, invert_undos: bool) -> Cow<Subset> {
    let mut deletes_from_union = Cow::Borrowed(&self.deletes_from_union);
    let mut undone_groups = Cow::Borrowed(&self.undone_groups);
    // Invert changes working backwards from the present.
    for rev in self.revs[rev_index..].iter().rev() {
        deletes_from_union = match rev.edit {
            Edit { ref inserts, ref deletes, ref undo_group, .. } => {
                if undone_groups.contains(undo_group) {
                    // The edit is undone: its inserts are currently marked
                    // deleted, so just shrink them out of the coordinates.
                    Cow::Owned(deletes_from_union.transform_shrink(inserts))
                } else {
                    // Un-delete what it deleted, then shrink out its inserts.
                    let un_deleted = deletes_from_union.subtract(deletes);
                    Cow::Owned(un_deleted.transform_shrink(inserts))
                }
            }
            Undo { ref toggled_groups, ref deletes_bitxor } => {
                if invert_undos {
                    // XOR is its own inverse: re-applying the stored diffs
                    // reverts both the undone set and the deletes.
                    let new_undone =
                        undone_groups.symmetric_difference(toggled_groups).cloned().collect();
                    undone_groups = Cow::Owned(new_undone);
                    Cow::Owned(deletes_from_union.bitxor(deletes_bitxor))
                } else {
                    deletes_from_union
                }
            }
        }
    }
    deletes_from_union
}
/// Reconstructs the document text as of the revision at `rev_index`, by
/// synthesizing a delta from the current text/tombstones to that state.
fn rev_content_for_index(&self, rev_index: usize) -> Rope {
    let old_deletes_from_union = self.deletes_from_cur_union_for_index(rev_index);
    let delta =
        Delta::synthesize(&self.tombstones, &self.deletes_from_union, &old_deletes_from_union);
    delta.apply(&self.text)
}
/// Like `deletes_from_union_for_index`, but expressed relative to the
/// *current* union string: characters inserted after `rev_index` didn't
/// exist then, so they are additionally marked deleted. The result lines up
/// with the current `text`/`tombstones`.
fn deletes_from_cur_union_for_index(&self, rev_index: usize) -> Cow<Subset> {
    let mut deletes_from_union = self.deletes_from_union_for_index(rev_index);
    for rev in &self.revs[rev_index + 1..] {
        if let Edit { ref inserts, .. } = rev.edit {
            if !inserts.is_empty() {
                // Later inserts count as deleted relative to this revision.
                deletes_from_union = Cow::Owned(deletes_from_union.transform_union(inserts));
            }
        }
    }
    deletes_from_union
}
/// The largest undo group id used so far. `revs` is never empty
/// (`Engine::empty` seeds it), so the `unwrap` cannot fail.
pub fn max_undo_group_id(&self) -> usize {
    self.revs.last().unwrap().max_undo_so_far
}

/// The id of the current head revision.
pub fn get_head_rev_id(&self) -> RevId {
    self.revs.last().unwrap().rev_id
}

/// The currently visible document contents.
pub fn get_head(&self) -> &Rope {
    &self.text
}

/// Reconstructs the document contents as of the revision identified by
/// `rev`, or `None` if no such revision exists in the history.
pub fn get_rev(&self, rev: RevToken) -> Option<Rope> {
    self.find_rev_token(rev).map(|rev_index| self.rev_content_for_index(rev_index))
}
/// Returns a delta that, applied to the text as of `base_rev`, produces the
/// current head text.
///
/// # Errors
/// `MissingRevision` if `base_rev` is not in the history.
pub fn try_delta_rev_head(&self, base_rev: RevToken) -> Result<Delta<RopeInfo>, Error> {
    let ix = self.find_rev_token(base_rev).ok_or_else(|| Error::MissingRevision(base_rev))?;
    let prev_from_union = self.deletes_from_cur_union_for_index(ix);
    // Rebuild the tombstones as they were at `base_rev` so that synthesize
    // can copy the characters that were visible then but aren't now.
    let old_tombstones = shuffle_tombstones(
        &self.text,
        &self.tombstones,
        &self.deletes_from_union,
        &prev_from_union,
    );
    Ok(Delta::synthesize(&old_tombstones, &prev_from_union, &self.deletes_from_union))
}
/// Builds the revision representing `delta` applied against `base_rev`,
/// rebasing it over every revision committed since. Returns the new
/// `Revision` together with the new text, tombstones and
/// `deletes_from_union`; nothing is committed here (see `try_edit_rev`).
///
/// # Errors
/// - `MissingRevision` if `base_rev` is unknown.
/// - `MalformedDelta` if the delta's base length doesn't match the length of
///   the text as of `base_rev`.
fn mk_new_rev(
    &self,
    new_priority: usize,
    undo_group: usize,
    base_rev: RevToken,
    delta: Delta<RopeInfo>,
) -> Result<(Revision, Rope, Rope, Subset), Error> {
    let ix = self.find_rev_token(base_rev).ok_or_else(|| Error::MissingRevision(base_rev))?;
    let (ins_delta, deletes) = delta.factor();
    // Rebase the delta onto the base_rev union string instead of its text.
    let deletes_at_rev = self.deletes_from_union_for_index(ix);
    // Validate before transforming: delta must match the base_rev text length.
    if ins_delta.base_len != deletes_at_rev.len_after_delete() {
        return Err(Error::MalformedDelta {
            delta_len: ins_delta.base_len,
            rev_len: deletes_at_rev.len_after_delete(),
        });
    }
    let mut union_ins_delta = ins_delta.transform_expand(&deletes_at_rev, true);
    let mut new_deletes = deletes.transform_expand(&deletes_at_rev);
    // Rebase over every later revision's inserts, ordering concurrent
    // inserts deterministically by FullPriority.
    let new_full_priority = FullPriority { priority: new_priority, session_id: self.session };
    for r in &self.revs[ix + 1..] {
        if let Edit { priority, ref inserts, .. } = r.edit {
            if !inserts.is_empty() {
                let full_priority =
                    FullPriority { priority, session_id: r.rev_id.session_id() };
                let after = new_full_priority >= full_priority;
                union_ins_delta = union_ins_delta.transform_expand(inserts, after);
                new_deletes = new_deletes.transform_expand(inserts);
            }
        }
    }
    // Move the deletes into the coordinates that include our own inserts.
    let new_inserts = union_ins_delta.inserted_subset();
    if !new_inserts.is_empty() {
        new_deletes = new_deletes.transform_expand(&new_inserts);
    }
    // Apply the insertions to the visible text.
    let cur_deletes_from_union = &self.deletes_from_union;
    let text_ins_delta = union_ins_delta.transform_shrink(cur_deletes_from_union);
    let text_with_inserts = text_ins_delta.apply(&self.text);
    let rebased_deletes_from_union = cur_deletes_from_union.transform_expand(&new_inserts);
    // If this edit's undo group is already undone (possible with concurrent
    // edits), hide its inserts immediately instead of applying its deletes.
    let undone = self.undone_groups.contains(&undo_group);
    let new_deletes_from_union = {
        let to_delete = if undone { &new_inserts } else { &new_deletes };
        rebased_deletes_from_union.union(to_delete)
    };
    // Move newly hidden characters from text into tombstones.
    let (new_text, new_tombstones) = shuffle(
        &text_with_inserts,
        &self.tombstones,
        &rebased_deletes_from_union,
        &new_deletes_from_union,
    );
    let head_rev = &self.revs.last().unwrap();
    Ok((
        Revision {
            rev_id: self.next_rev_id(),
            max_undo_so_far: std::cmp::max(undo_group, head_rev.max_undo_so_far),
            edit: Edit {
                priority: new_priority,
                undo_group,
                inserts: new_inserts,
                deletes: new_deletes,
            },
        },
        new_text,
        new_tombstones,
        new_deletes_from_union,
    ))
}
/// Records a new edit of `delta` against `base_rev`, rebasing over any later
/// revisions. Panics on the error cases of `try_edit_rev` (unknown base
/// revision or a delta whose base length doesn't match it).
pub fn edit_rev(
    &mut self,
    priority: usize,
    undo_group: usize,
    base_rev: RevToken,
    delta: Delta<RopeInfo>,
) {
    self.try_edit_rev(priority, undo_group, base_rev, delta).unwrap();
}

/// Fallible version of `edit_rev`: builds the rebased revision via
/// `mk_new_rev` and, on success, commits the new revision, text, tombstones
/// and deletes_from_union.
pub fn try_edit_rev(
    &mut self,
    priority: usize,
    undo_group: usize,
    base_rev: RevToken,
    delta: Delta<RopeInfo>,
) -> Result<(), Error> {
    let (new_rev, new_text, new_tombstones, new_deletes_from_union) =
        self.mk_new_rev(priority, undo_group, base_rev, delta)?;
    self.rev_id_counter += 1;
    self.revs.push(new_rev);
    self.text = new_text;
    self.tombstones = new_tombstones;
    self.deletes_from_union = new_deletes_from_union;
    Ok(())
}
/// Returns an empty `Subset` sized to the union string as it was *before*
/// the first revision's inserts — the starting point for accumulating
/// deletions in `gc`.
fn empty_subset_before_first_rev(&self) -> Subset {
    // `revs` is never empty (`Engine::empty` seeds one revision).
    // The original took `&self.revs.first().unwrap()`, a needless `&&Revision` borrow.
    let first_rev = self.revs.first().unwrap();
    // The subset is immediately transform_expanded by inserts when walking
    // the history, so the length must exclude the first revision's inserts.
    let len = match first_rev.edit {
        Edit { ref inserts, .. } => inserts.count(CountMatcher::Zero),
        Undo { ref deletes_bitxor, .. } => deletes_bitxor.count(CountMatcher::All),
    };
    Subset::new(len)
}
/// Returns the first index whose revision might be affected by toggling
/// `toggled_groups`: the index just after the latest revision whose
/// `max_undo_so_far` predates the lowest toggled group. With nothing
/// toggled, returns `revs.len()` (no revision needs reexamination).
fn find_first_undo_candidate_index(&self, toggled_groups: &BTreeSet<usize>) -> usize {
    // Rewritten as an expression-bodied match; the original used an
    // `if let`/`else` with needless trailing `return`s.
    match toggled_groups.iter().next() {
        Some(&lowest_group) => {
            // Scan from the head; `max_undo_so_far` is non-decreasing, so the
            // first revision below `lowest_group` bounds the search.
            for (i, rev) in self.revs.iter().enumerate().rev() {
                if rev.max_undo_so_far < lowest_group {
                    return i + 1;
                }
            }
            0
        }
        None => self.revs.len(),
    }
}
/// Computes the `Undo` revision and the new `deletes_from_union` that result
/// from setting the undone set to exactly `groups` (the complete set, not a
/// diff). Nothing is committed here; see `undo`.
fn compute_undo(&self, groups: &BTreeSet<usize>) -> (Revision, Subset) {
    let toggled_groups = self.undone_groups.symmetric_difference(&groups).cloned().collect();
    let first_candidate = self.find_first_undo_candidate_index(&toggled_groups);
    // `false`: don't invert undos — we want deletes relative to the current
    // union string, starting from before any affected revision.
    let mut deletes_from_union =
        self.deletes_from_union_before_index(first_candidate, false).into_owned();
    // Replay each later edit, applying or hiding it depending on whether its
    // group is in the new undone set.
    for rev in &self.revs[first_candidate..] {
        if let Edit { ref undo_group, ref inserts, ref deletes, .. } = rev.edit {
            if groups.contains(undo_group) {
                // Undone: its inserts become hidden; its deletes don't apply.
                if !inserts.is_empty() {
                    deletes_from_union = deletes_from_union.transform_union(inserts);
                }
            } else {
                // Done: its inserts exist in the union and its deletes apply.
                if !inserts.is_empty() {
                    deletes_from_union = deletes_from_union.transform_expand(inserts);
                }
                if !deletes.is_empty() {
                    deletes_from_union = deletes_from_union.union(deletes);
                }
            }
        }
    }
    // Store the reversible XOR diff so history walks can invert this undo.
    let deletes_bitxor = self.deletes_from_union.bitxor(&deletes_from_union);
    let max_undo_so_far = self.revs.last().unwrap().max_undo_so_far;
    (
        Revision {
            rev_id: self.next_rev_id(),
            max_undo_so_far,
            edit: Undo { toggled_groups, deletes_bitxor },
        },
        deletes_from_union,
    )
}
/// Sets the currently-undone groups to exactly `groups`, recording an `Undo`
/// revision and moving characters between `text` and `tombstones` to match.
pub fn undo(&mut self, groups: BTreeSet<usize>) {
    let (new_rev, new_deletes_from_union) = self.compute_undo(&groups);
    // Move newly hidden characters into tombstones (and redone ones back out).
    let (new_text, new_tombstones) = shuffle(
        &self.text,
        &self.tombstones,
        &self.deletes_from_union,
        &new_deletes_from_union,
    );
    self.text = new_text;
    self.tombstones = new_tombstones;
    self.deletes_from_union = new_deletes_from_union;
    self.undone_groups = groups;
    self.revs.push(new_rev);
    self.rev_id_counter += 1;
}
/// Returns true when both revisions exist in the history and denote the same
/// visible content (their deletes relative to the current union string agree).
pub fn is_equivalent_revision(&self, base_rev: RevId, other_rev: RevId) -> bool {
    let subset_for =
        |rev: RevId| self.find_rev(rev).map(|ix| self.deletes_from_cur_union_for_index(ix));
    match (subset_for(base_rev), subset_for(other_rev)) {
        (Some(base), Some(other)) => base == other,
        _ => false,
    }
}
pub fn gc(&mut self, gc_groups: &BTreeSet<usize>) {
let mut gc_dels = self.empty_subset_before_first_rev();
let mut retain_revs = BTreeSet::new();
if let Some(last) = self.revs.last() {
retain_revs.insert(last.rev_id);
}
{
for rev in &self.revs {
if let Edit { ref undo_group, ref inserts, ref deletes, .. } = rev.edit {
if !retain_revs.contains(&rev.rev_id) && gc_groups.contains(undo_group) {
if self.undone_groups.contains(undo_group) {
if !inserts.is_empty() {
gc_dels = gc_dels.transform_union(inserts);
}
} else {
if !inserts.is_empty() {
gc_dels = gc_dels.transform_expand(inserts);
}
if !deletes.is_empty() {
gc_dels = gc_dels.union(deletes);
}
}
} else if !inserts.is_empty() {
gc_dels = gc_dels.transform_expand(inserts);
}
}
}
}
if !gc_dels.is_empty() {
let not_in_tombstones = self.deletes_from_union.complement();
let dels_from_tombstones = gc_dels.transform_shrink(¬_in_tombstones);
self.tombstones = dels_from_tombstones.delete_from(&self.tombstones);
self.deletes_from_union = self.deletes_from_union.transform_shrink(&gc_dels);
}
let old_revs = std::mem::replace(&mut self.revs, Vec::new());
for rev in old_revs.into_iter().rev() {
match rev.edit {
Edit { priority, undo_group, inserts, deletes } => {
let new_gc_dels = if inserts.is_empty() {
None
} else {
Some(gc_dels.transform_shrink(&inserts))
};
if retain_revs.contains(&rev.rev_id) || !gc_groups.contains(&undo_group) {
let (inserts, deletes) = if gc_dels.is_empty() {
(inserts, deletes)
} else {
(inserts.transform_shrink(&gc_dels), deletes.transform_shrink(&gc_dels))
};
self.revs.push(Revision {
rev_id: rev.rev_id,
max_undo_so_far: rev.max_undo_so_far,
edit: Edit { priority, undo_group, inserts, deletes },
});
}
if let Some(new_gc_dels) = new_gc_dels {
gc_dels = new_gc_dels;
}
}
Undo { toggled_groups, deletes_bitxor } => {
if retain_revs.contains(&rev.rev_id) {
let new_deletes_bitxor = if gc_dels.is_empty() {
deletes_bitxor
} else {
deletes_bitxor.transform_shrink(&gc_dels)
};
self.revs.push(Revision {
rev_id: rev.rev_id,
max_undo_so_far: rev.max_undo_so_far,
edit: Undo {
toggled_groups: &toggled_groups - gc_groups,
deletes_bitxor: new_deletes_bitxor,
},
})
}
}
}
}
self.revs.reverse();
}
/// Merges the history of `other` into `self`: revisions unique to `other`
/// are rebased on top of `self`'s head and appended.
/// Panics (inside `rearrange`/`compute_deltas`) if the divergent revisions
/// contain undos — "can't merge undo yet".
pub fn merge(&mut self, other: &Engine) {
    let (mut new_revs, text, tombstones, deletes_from_union) = {
        let base_index = find_base_index(&self.revs, &other.revs);
        let a_to_merge = &self.revs[base_index..];
        let b_to_merge = &other.revs[base_index..];
        let common = find_common(a_to_merge, b_to_merge);
        // Move each side's unique revisions after the shared ones.
        let a_new = rearrange(a_to_merge, &common, self.deletes_from_union.len());
        let b_new = rearrange(b_to_merge, &common, other.deletes_from_union.len());
        // Express b's unique revisions as concrete deltas, then rebase them
        // over a's unique inserts (grouped by priority).
        let b_deltas =
            compute_deltas(&b_new, &other.text, &other.tombstones, &other.deletes_from_union);
        let expand_by = compute_transforms(a_new);
        let max_undo = self.max_undo_group_id();
        rebase(
            expand_by,
            b_deltas,
            self.text.clone(),
            self.tombstones.clone(),
            self.deletes_from_union.clone(),
            max_undo,
        )
    };
    self.text = text;
    self.tombstones = tombstones;
    self.deletes_from_union = deletes_from_union;
    self.revs.append(&mut new_revs);
}
/// Sets the session id used for future revisions. Must be called before any
/// revisions are added (asserted): revisions already minted would keep the
/// default session and could collide when merging with other engines.
pub fn set_session_id(&mut self, session: SessionId) {
    assert_eq!(
        1,
        self.revs.len(),
        "Revisions were added to an Engine before set_session_id, these may collide."
    );
    self.session = session;
}
}
/// Computes the new tombstones rope for a change of `deletes_from_union`:
/// characters that become deleted move from `text` into the result, and
/// characters that become visible drop out of the tombstones.
fn shuffle_tombstones(
    text: &Rope,
    tombstones: &Rope,
    old_deletes_from_union: &Subset,
    new_deletes_from_union: &Subset,
) -> Rope {
    // Complementing deletes_from_union swaps the roles of text and
    // tombstones, letting the same synthesize/apply machinery insert text
    // characters into the tombstones.
    let inverse_tombstones_map = old_deletes_from_union.complement();
    let move_delta =
        Delta::synthesize(text, &inverse_tombstones_map, &new_deletes_from_union.complement());
    move_delta.apply(tombstones)
}
/// Applies a change of `deletes_from_union` to both ropes: returns the new
/// `(text, tombstones)` pair with characters moved between them accordingly.
fn shuffle(
    text: &Rope,
    tombstones: &Rope,
    old_deletes_from_union: &Subset,
    new_deletes_from_union: &Subset,
) -> (Rope, Rope) {
    // Delta that removes newly-deleted characters from (and restores
    // newly-visible ones to) the text.
    let del_delta = Delta::synthesize(tombstones, old_deletes_from_union, new_deletes_from_union);
    let new_text = del_delta.apply(text);
    (new_text, shuffle_tombstones(text, tombstones, old_deletes_from_union, new_deletes_from_union))
}
/// Returns an index before which the two histories are known identical.
/// NOTE(review): placeholder implementation — only the shared initial
/// revision (index 0) is assumed common; the asserts check exactly that.
fn find_base_index(a: &[Revision], b: &[Revision]) -> usize {
    assert!(!a.is_empty() && !b.is_empty());
    assert!(a[0].rev_id == b[0].rev_id);
    1
}
/// Collects the revision ids that appear in both histories.
fn find_common(a: &[Revision], b: &[Revision]) -> BTreeSet<RevId> {
    let b_ids: BTreeSet<RevId> = b.iter().map(|rev| rev.rev_id).collect();
    a.iter().map(|rev| rev.rev_id).filter(|id| b_ids.contains(id)).collect()
}
/// Returns the revisions in `revs` whose ids are *not* in `base_revs`,
/// transformed to apply after all the base revisions instead of interleaved
/// with them. `head_len` is the union-string length at the head of `revs`.
/// Panics if a moved revision is an `Undo` ("can't merge undo yet").
fn rearrange(revs: &[Revision], base_revs: &BTreeSet<RevId>, head_len: usize) -> Vec<Revision> {
    // `s` tracks, walking backwards, the base revisions' inserts seen so
    // far, expressed in the coordinates of the current revision.
    let mut s = Subset::new(head_len);
    let mut out = Vec::with_capacity(revs.len() - base_revs.len());
    for rev in revs.iter().rev() {
        let is_base = base_revs.contains(&rev.rev_id);
        let contents = match rev.edit {
            Contents::Edit { priority, undo_group, ref inserts, ref deletes } => {
                if is_base {
                    // Accumulate base inserts; the revision itself is dropped.
                    s = inserts.transform_union(&s);
                    None
                } else {
                    // Fast-forward this revision over the base inserts that follow it.
                    let transformed_inserts = inserts.transform_expand(&s);
                    let transformed_deletes = deletes.transform_expand(&s);
                    // Earlier non-base revisions must not be transformed past this one.
                    s = s.transform_shrink(&transformed_inserts);
                    Some(Contents::Edit {
                        inserts: transformed_inserts,
                        deletes: transformed_deletes,
                        priority,
                        undo_group,
                    })
                }
            }
            Contents::Undo { .. } => panic!("can't merge undo yet"),
        };
        if let Some(edit) = contents {
            out.push(Revision { edit, rev_id: rev.rev_id, max_undo_so_far: rev.max_undo_so_far });
        }
    }
    // The loop produced newest-first; restore chronological order.
    out.as_mut_slice().reverse();
    out
}
/// A revision re-expressed as a concrete insert delta plus a delete subset,
/// produced by `compute_deltas` and consumed by `rebase`.
#[derive(Clone, Debug)]
struct DeltaOp {
    rev_id: RevId,
    priority: usize,
    undo_group: usize,
    /// The inserted characters, as a delta against the text before this op.
    inserts: InsertDelta<RopeInfo>,
    /// The deleted characters (cloned from the revision's `deletes` subset).
    deletes: Subset,
}
/// Converts each (rearranged) revision's union-subset representation into a
/// concrete `DeltaOp`, using the head-state `text`, `tombstones` and
/// `deletes_from_union`. Panics on `Undo` revisions ("can't merge undo yet").
fn compute_deltas(
    revs: &[Revision],
    text: &Rope,
    tombstones: &Rope,
    deletes_from_union: &Subset,
) -> Vec<DeltaOp> {
    let mut out = Vec::with_capacity(revs.len());
    // Walking backwards: everything inserted by revisions at or after the
    // current one.
    let mut cur_all_inserts = Subset::new(deletes_from_union.len());
    for rev in revs.iter().rev() {
        match rev.edit {
            Contents::Edit { priority, undo_group, ref inserts, ref deletes } => {
                let older_all_inserts = inserts.transform_union(&cur_all_inserts);
                // Rebuild the tombstones as of just before this revision so we
                // can synthesize its insertions as a concrete delta.
                let tombstones_here =
                    shuffle_tombstones(text, tombstones, deletes_from_union, &older_all_inserts);
                let delta =
                    Delta::synthesize(&tombstones_here, &older_all_inserts, &cur_all_inserts);
                // Only the insert half is needed; deletes are kept as a subset.
                let (ins, _) = delta.factor();
                out.push(DeltaOp {
                    rev_id: rev.rev_id,
                    priority,
                    undo_group,
                    inserts: ins,
                    deletes: deletes.clone(),
                });
                cur_all_inserts = older_all_inserts;
            }
            Contents::Undo { .. } => panic!("can't merge undo yet"),
        }
    }
    // The loop produced newest-first; restore chronological order.
    out.as_mut_slice().reverse();
    out
}
/// Collapses the inserts of `revs` into a list of `(FullPriority, Subset)`
/// pairs for `rebase`'s `expand_by` argument, merging consecutive revisions
/// that share a priority. Revisions with empty inserts are skipped.
fn compute_transforms(revs: Vec<Revision>) -> Vec<(FullPriority, Subset)> {
    let mut out = Vec::new();
    let mut last_priority: Option<usize> = None;
    for r in revs {
        if let Contents::Edit { priority, inserts, .. } = r.edit {
            if inserts.is_empty() {
                continue;
            }
            if Some(priority) == last_priority {
                // Extend the current run of equal-priority inserts.
                let last: &mut (FullPriority, Subset) = out.last_mut().unwrap();
                last.1 = last.1.transform_union(&inserts);
            } else {
                // Start a new run; session id comes from the first revision in it.
                last_priority = Some(priority);
                let prio = FullPriority { priority, session_id: r.rev_id.session_id() };
                out.push((prio, inserts));
            }
        }
    }
    out
}
/// Rebases `b_new` (one side's unique revisions as concrete deltas) over
/// `expand_by` (the other side's unique inserts grouped by priority),
/// producing the rebased revisions plus the merged text, tombstones and
/// deletes_from_union.
fn rebase(
    mut expand_by: Vec<(FullPriority, Subset)>,
    b_new: Vec<DeltaOp>,
    mut text: Rope,
    mut tombstones: Rope,
    mut deletes_from_union: Subset,
    mut max_undo_so_far: usize,
) -> (Vec<Revision>, Rope, Rope, Subset) {
    let mut out = Vec::with_capacity(b_new.len());
    let mut next_expand_by = Vec::with_capacity(expand_by.len());
    for op in b_new {
        let DeltaOp { rev_id, priority, undo_group, mut inserts, mut deletes } = op;
        let full_priority = FullPriority { priority, session_id: rev_id.session_id() };
        // Expand this op by each transform, while re-expressing the
        // transforms in post-insert coordinates for the next op.
        for &(trans_priority, ref trans_inserts) in &expand_by {
            // Priority decides which side's concurrent inserts come first.
            let after = full_priority >= trans_priority;
            inserts = inserts.transform_expand(trans_inserts, after);
            let inserted = inserts.inserted_subset();
            let new_trans_inserts = trans_inserts.transform_expand(&inserted);
            deletes = deletes.transform_expand(&new_trans_inserts);
            next_expand_by.push((trans_priority, new_trans_inserts));
        }
        // Apply the rebased op to the accumulated document state.
        let text_inserts = inserts.transform_shrink(&deletes_from_union);
        let text_with_inserts = text_inserts.apply(&text);
        let inserted = inserts.inserted_subset();
        let expanded_deletes_from_union = deletes_from_union.transform_expand(&inserted);
        let new_deletes_from_union = expanded_deletes_from_union.union(&deletes);
        let (new_text, new_tombstones) = shuffle(
            &text_with_inserts,
            &tombstones,
            &expanded_deletes_from_union,
            &new_deletes_from_union,
        );
        text = new_text;
        tombstones = new_tombstones;
        deletes_from_union = new_deletes_from_union;
        max_undo_so_far = std::cmp::max(max_undo_so_far, undo_group);
        out.push(Revision {
            rev_id,
            max_undo_so_far,
            edit: Contents::Edit { priority, undo_group, deletes, inserts: inserted },
        });
        // The updated transforms become the baseline for the next op.
        expand_by = next_expand_by;
        next_expand_by = Vec::with_capacity(expand_by.len());
    }
    (out, text, tombstones, deletes_from_union)
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::MissingRevision(_) => write!(f, "Revision not found"),
Error::MalformedDelta { delta_len, rev_len } => {
write!(f, "Delta base_len {} does not match revision length {}", delta_len, rev_len)
}
}
}
}
// Debug delegates to Display so `unwrap` panics and `{:?}` output show the
// readable message instead of the structural form.
impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
// Marker impl: Display + Debug above satisfy the trait's defaults.
impl std::error::Error for Error {}
#[cfg(test)]
#[rustfmt::skip]
mod tests {
use crate::engine::*;
use crate::rope::{Rope, RopeInfo};
use crate::delta::{Builder, Delta, DeltaElement};
use crate::multiset::Subset;
use crate::interval::Interval;
use std::collections::BTreeSet;
use crate::test_helpers::{parse_subset_list, parse_subset, parse_delta, debug_subsets};
const TEST_STR: &'static str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
// Fixture delta against TEST_STR: delete 10..36, replace 39..42 with "DEEF",
// insert "999" at 54, delete 58..61.
fn build_delta_1() -> Delta<RopeInfo> {
    let mut d_builder = Builder::new(TEST_STR.len());
    d_builder.delete(Interval::new(10, 36));
    d_builder.replace(Interval::new(39, 42), Rope::from("DEEF"));
    d_builder.replace(Interval::new(54, 54), Rope::from("999"));
    d_builder.delete(Interval::new(58, 61));
    d_builder.build()
}

// Second fixture delta, overlapping delta_1 in places, for concurrency tests.
fn build_delta_2() -> Delta<RopeInfo> {
    let mut d_builder = Builder::new(TEST_STR.len());
    d_builder.replace(Interval::new(1, 3), Rope::from("!"));
    d_builder.delete(Interval::new(10, 36));
    d_builder.replace(Interval::new(42, 45), Rope::from("GI"));
    d_builder.replace(Interval::new(54, 54), Rope::from("888"));
    d_builder.replace(Interval::new(59, 60), Rope::from("HI"));
    d_builder.build()
}
// A single edit against the initial revision yields delta_1's result.
#[test]
fn edit_rev_simple() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(0, 1, first_rev, build_delta_1());
    assert_eq!("0123456789abcDEEFghijklmnopqr999stuvz", String::from(engine.get_head()));
}

// An identity delta (copy everything) leaves the text unchanged, even when
// applied twice against the same base revision.
#[test]
fn edit_rev_empty() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    let delta = Delta {
        base_len: TEST_STR.len(),
        els: vec![DeltaElement::Copy(0, TEST_STR.len())],
    };
    engine.edit_rev(0, 1, first_rev, delta.clone());
    assert_eq!(TEST_STR, String::from(engine.get_head()));
    engine.edit_rev(0, 1, first_rev, delta.clone());
    assert_eq!(TEST_STR, String::from(engine.get_head()));
}

// Two edits against the same base revision merge deterministically
// (delta_1 has the higher priority here).
#[test]
fn edit_rev_concurrent() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    engine.edit_rev(0, 2, first_rev, build_delta_2());
    assert_eq!("0!3456789abcDEEFGIjklmnopqr888999stuvHIz", String::from(engine.get_head()));
}

// A delta built against a stale length (5) applied after the text grew to 6
// panics with MalformedDelta via edit_rev's unwrap.
#[test]
#[should_panic(expected = "Delta base_len 5 does not match revision length 6")]
fn edit_rev_bad_delta_len() {
    let test_str = "hello";
    let mut engine = Engine::new(Rope::from(test_str));
    let iv = Interval::new(1, 1);
    let mut builder = Builder::new(test_str.len());
    builder.replace(iv, "1".into());
    let delta1 = builder.build();
    let mut builder = Builder::new(test_str.len());
    builder.replace(iv, "2".into());
    let delta2 = builder.build();
    let rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, rev, delta1);
    let rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 2, rev, delta2);
}
// Shared driver: applies delta_1 (group 1) and delta_2 (group 2)
// concurrently against the initial revision, setting the undone groups to
// `undos` either before or after the edits; either order must yield `output`.
fn undo_test(before: bool, undos : BTreeSet<usize>, output: &str) {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    if before {
        engine.undo(undos.clone());
    }
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    engine.edit_rev(0, 2, first_rev, build_delta_2());
    if !before {
        engine.undo(undos);
    }
    assert_eq!(output, String::from(engine.get_head()));
}

// Undoing both groups (set before the edits) restores the original text.
#[test]
fn edit_rev_undo() {
    undo_test(true, [1,2].iter().cloned().collect(), TEST_STR);
}

// Undoing group 2 beforehand leaves only delta_1's effect.
#[test]
fn edit_rev_undo_2() {
    undo_test(true, [2].iter().cloned().collect(), "0123456789abcDEEFghijklmnopqr999stuvz");
}

// Undoing group 1 beforehand leaves only delta_2's effect.
#[test]
fn edit_rev_undo_3() {
    undo_test(true, [1].iter().cloned().collect(), "0!3456789abcdefGIjklmnopqr888stuvwHIyz");
}

// The delta from a past revision to head reproduces the head text.
#[test]
fn try_delta_rev_head() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    let d = engine.try_delta_rev_head(first_rev).unwrap();
    assert_eq!(String::from(engine.get_head()), d.apply_to_string(TEST_STR));
}

// Same, with two concurrent edits on top of the base revision.
#[test]
fn try_delta_rev_head_2() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    engine.edit_rev(0, 2, first_rev, build_delta_2());
    let d = engine.try_delta_rev_head(first_rev).unwrap();
    assert_eq!(String::from(engine.get_head()), d.apply_to_string(TEST_STR));
}

// The delta from an intermediate revision applies to that revision's text.
#[test]
fn try_delta_rev_head_3() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    let after_first_edit = engine.get_head_rev_id().token();
    engine.edit_rev(0, 2, first_rev, build_delta_2());
    let d = engine.try_delta_rev_head(after_first_edit).unwrap();
    assert_eq!(String::from(engine.get_head()), d.apply_to_string("0123456789abcDEEFghijklmnopqr999stuvz"));
}

// An unknown token yields an error instead of panicking.
#[test]
fn try_delta_rev_head_missing_token() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let first_rev = engine.get_head_rev_id().token();
    let bad_rev = RevToken::default();
    engine.edit_rev(1, 1, first_rev, build_delta_1());
    let d = engine.try_delta_rev_head(bad_rev);
    assert!(d.is_err());
}

// Undo-after-edit variants of the edit_rev_undo tests above.
#[test]
fn undo() {
    undo_test(false, [1,2].iter().cloned().collect(), TEST_STR);
}

#[test]
fn undo_2() {
    undo_test(false, [2].iter().cloned().collect(), "0123456789abcDEEFghijklmnopqr999stuvz");
}

#[test]
fn undo_3() {
    undo_test(false, [1].iter().cloned().collect(), "0!3456789abcdefGIjklmnopqr888stuvwHIyz");
}
// Edits based on a pre-undo head, followed by undoing a superset of groups,
// keep only the edit from the never-undone group 2.
#[test]
fn undo_4() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,0), Rope::from("a"), TEST_STR.len());
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, d1.clone());
    let new_head = engine.get_head_rev_id().token();
    engine.undo([1].iter().cloned().collect());
    let d2 = Delta::simple_edit(Interval::new(0,0), Rope::from("a"), TEST_STR.len()+1);
    engine.edit_rev(1, 2, new_head, d2);
    let new_head_2 = engine.get_head_rev_id().token();
    let d3 = Delta::simple_edit(Interval::new(0,0), Rope::from("b"), TEST_STR.len()+1);
    engine.edit_rev(1, 3, new_head_2, d3);
    engine.undo([1,3].iter().cloned().collect());
    assert_eq!("a0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
}

// Two groups deleting the same range; toggling different undone sets must
// restore the expected intermediate states.
#[test]
fn undo_5() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,10), Rope::from(""), TEST_STR.len());
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, d1.clone());
    engine.edit_rev(1, 2, first_rev, d1.clone());
    engine.undo([1].iter().cloned().collect());
    assert_eq!("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
    engine.undo([1,2].iter().cloned().collect());
    assert_eq!(TEST_STR, String::from(engine.get_head()));
    engine.undo([].iter().cloned().collect());
    assert_eq!("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
}

// GC of an undone group; later edits and undos must stay consistent.
#[test]
fn gc() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,0), Rope::from("c"), TEST_STR.len());
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, d1);
    let new_head = engine.get_head_rev_id().token();
    engine.undo([1].iter().cloned().collect());
    let d2 = Delta::simple_edit(Interval::new(0,0), Rope::from("a"), TEST_STR.len()+1);
    engine.edit_rev(1, 2, new_head, d2);
    let gc : BTreeSet<usize> = [1].iter().cloned().collect();
    engine.gc(&gc);
    let d3 = Delta::simple_edit(Interval::new(0,0), Rope::from("b"), TEST_STR.len()+1);
    let new_head_2 = engine.get_head_rev_id().token();
    engine.edit_rev(1, 3, new_head_2, d3);
    engine.undo([3].iter().cloned().collect());
    assert_eq!("a0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
}
// Stress driver: performs `edits` sequential single-char inserts while
// gc'ing groups older than `max_undos`, then undoes the remaining groups one
// at a time, edits, gc's the undone groups, and edits again; checks the
// surviving text.
fn gc_scenario(edits: usize, max_undos: usize) {
    let mut engine = Engine::new(Rope::from(""));
    for i in 0..edits {
        let d = Delta::simple_edit(Interval::new(0,0), Rope::from("b"), i);
        let head = engine.get_head_rev_id().token();
        engine.edit_rev(1, i+1, head, d);
        if i >= max_undos {
            let to_gc : BTreeSet<usize> = [i-max_undos].iter().cloned().collect();
            engine.gc(&to_gc)
        }
    }
    // Incrementally undo all the groups still within the undo window.
    let mut to_undo = BTreeSet::new();
    for i in ((edits-max_undos)..edits).rev() {
        to_undo.insert(i+1);
        engine.undo(to_undo.clone());
    }
    let d1 = Delta::simple_edit(Interval::new(0,0), Rope::from("h"), engine.get_head().len());
    let head = engine.get_head_rev_id().token();
    engine.edit_rev(1, edits+1, head, d1);
    engine.gc(&to_undo);
    let chars_left = (edits-max_undos)+1;
    let d2 = Delta::simple_edit(Interval::new(chars_left, chars_left), Rope::from("f"), engine.get_head().len());
    let head2 = engine.get_head_rev_id().token();
    engine.edit_rev(1, edits+1, head2, d2);
    let mut soln = String::from("h");
    for _ in 0..(edits-max_undos) {
        soln.push('b');
    }
    soln.push('f');
    assert_eq!(soln, String::from(engine.get_head()));
}

#[test]
fn gc_2() {
    gc_scenario(4,3);
}

#[test]
fn gc_3() {
    gc_scenario(35,20);
}

// GC'ing an applied group bakes in its deletion; later undos only revert the
// surviving group.
#[test]
fn gc_4() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,10), Rope::from(""), TEST_STR.len());
    let first_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, first_rev, d1.clone());
    engine.edit_rev(1, 2, first_rev, d1.clone());
    let gc : BTreeSet<usize> = [1].iter().cloned().collect();
    engine.gc(&gc);
    engine.undo([1,2].iter().cloned().collect());
    assert_eq!("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
}

// GC of a group that was undone before the edits; the other group can still
// be toggled afterwards.
#[test]
fn gc_5() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,10), Rope::from(""), TEST_STR.len());
    let initial_rev = engine.get_head_rev_id().token();
    engine.undo([1].iter().cloned().collect());
    engine.edit_rev(1, 1, initial_rev, d1.clone());
    engine.edit_rev(1, 2, initial_rev, d1.clone());
    let gc : BTreeSet<usize> = [1].iter().cloned().collect();
    engine.gc(&gc);
    assert_eq!("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
    engine.undo([2].iter().cloned().collect());
    assert_eq!(TEST_STR, String::from(engine.get_head()));
}

// GC while multiple groups are undone; redoing afterwards applies only the
// group that survived the gc.
#[test]
fn gc_6() {
    let mut engine = Engine::new(Rope::from(TEST_STR));
    let d1 = Delta::simple_edit(Interval::new(0,10), Rope::from(""), TEST_STR.len());
    let initial_rev = engine.get_head_rev_id().token();
    engine.edit_rev(1, 1, initial_rev, d1.clone());
    engine.undo([1,2].iter().cloned().collect());
    engine.edit_rev(1, 2, initial_rev, d1.clone());
    let gc : BTreeSet<usize> = [1].iter().cloned().collect();
    engine.gc(&gc);
    assert_eq!(TEST_STR, String::from(engine.get_head()));
    engine.undo([].iter().cloned().collect());
    assert_eq!("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", String::from(engine.get_head()));
}
// RevId for test session (1, 0) with the given revision number.
fn basic_rev(i: usize) -> RevId {
    RevId { session1: 1, session2: 0, num: i as u32 }
}

// Wraps each insert subset into an Edit revision numbered from 1, with the
// given priority, empty deletes, and one undo group per revision.
fn basic_insert_ops(inserts: Vec<Subset>, priority: usize) -> Vec<Revision> {
    inserts.into_iter().enumerate().map(|(i, inserts)| {
        let deletes = Subset::new(inserts.len());
        Revision {
            rev_id: basic_rev(i+1),
            max_undo_so_far: i+1,
            edit: Contents::Edit {
                priority, inserts, deletes,
                undo_group: i+1,
            }
        }
    }).collect()
}
#[test]
fn rearrange_1() {
let inserts = parse_subset_list("
##
-#-
#---
---#-
-----#
#------
");
let revs = basic_insert_ops(inserts, 1);
let base: BTreeSet<RevId> = [3,5].iter().cloned().map(basic_rev).collect();
let rearranged = rearrange(&revs, &base, 7);
let rearranged_inserts: Vec<Subset> = rearranged.into_iter().map(|c| {
match c.edit {
Contents::Edit {inserts, ..} => inserts,
Contents::Undo { .. } => panic!(),
}
}).collect();
debug_subsets(&rearranged_inserts);
let correct = parse_subset_list("
-##-
--#--
---#--
#------
");
assert_eq!(correct, rearranged_inserts);
}
fn ids_to_fake_revs(ids: &[usize]) -> Vec<Revision> {
let contents = Contents::Edit {
priority: 0,
undo_group: 0,
inserts: Subset::new(0),
deletes: Subset::new(0),
};
ids.iter().cloned().map(|i| {
Revision {
rev_id: basic_rev(i),
max_undo_so_far: i,
edit: contents.clone()
}
}).collect()
}
/// `find_common` should return exactly the revision ids present in both
/// histories.
#[test]
fn find_common_1() {
    let left = ids_to_fake_revs(&[0, 2, 4, 6, 8, 10, 12]);
    let right = ids_to_fake_revs(&[0, 1, 2, 4, 5, 8, 9]);
    let expected: BTreeSet<RevId> = [0, 2, 4, 8].iter().cloned().map(basic_rev).collect();
    assert_eq!(expected, find_common(&left, &right));
}
/// `find_base_index` locates where the two histories diverge; for these
/// inputs that is index 1.
#[test]
fn find_base_1() {
    let left = ids_to_fake_revs(&[0, 2, 4, 6, 8, 10, 12]);
    let right = ids_to_fake_revs(&[0, 1, 2, 4, 5, 8, 9]);
    assert_eq!(1, find_base_index(&left, &right));
}
/// Replaying the deltas produced by `compute_deltas` on top of the tombstone
/// text should reconstruct the full union string.
#[test]
fn compute_deltas_1() {
    let inserts = parse_subset_list("
-##-
--#--
---#--
#------
");
    let revs = basic_insert_ops(inserts, 1);
    let text = Rope::from("13456");
    let tombstones = Rope::from("27");
    let deletes_from_union = parse_subset("-#----#");
    let delta_ops = compute_deltas(&revs, &text, &tombstones, &deletes_from_union);
    println!("{:#?}", delta_ops);
    // Fold every insert delta over the tombstone text.
    let replayed = delta_ops.iter().fold(Rope::from("27"), |r, op| op.inserts.apply(&r));
    assert_eq!("1234567", String::from(replayed));
}
/// Four consecutive ops that all share priority 1 should coalesce into a
/// single transform covering every inserted position.
#[test]
fn compute_transforms_1() {
    let inserts = parse_subset_list("
-##-
--#--
---#--
#------
");
    let history = basic_insert_ops(inserts, 1);
    let expand_by = compute_transforms(history);
    assert_eq!(1, expand_by.len());
    assert_eq!(1, expand_by[0].0.priority);
    assert_eq!("#-####-", &format!("{:#?}", expand_by[0].1));
}
/// Three batches of ops with priorities 1, 4 and 2; the priority-4 batch
/// inserts nothing, so only two transforms come out, one per inserting
/// priority.
#[test]
fn compute_transforms_2() {
    let first = parse_subset_list("
-##-
--#--
");
    let mut revs = basic_insert_ops(first, 1);
    let second = parse_subset_list("
----
");
    revs.extend(basic_insert_ops(second, 4));
    let third = parse_subset_list("
---#--
#------
");
    revs.extend(basic_insert_ops(third, 2));
    let expand_by = compute_transforms(revs);
    assert_eq!(2, expand_by.len());
    assert_eq!(1, expand_by[0].0.priority);
    assert_eq!(2, expand_by[1].0.priority);
    assert_eq!("-###-", &format!("{:#?}", expand_by[0].1));
    assert_eq!("#---#--", &format!("{:#?}", expand_by[1].1));
}
/// Rebases b's delta ops (priority 2) on top of a's state (priority 1) and
/// checks the resulting inserts, text, tombstones and deletes-from-union.
#[test]
fn rebase_1() {
    let insert_shapes = parse_subset_list("
--#-
----#
");
    let a_revs = basic_insert_ops(insert_shapes.clone(), 1);
    let b_revs = basic_insert_ops(insert_shapes, 2);
    let b_delta_ops = compute_deltas(
        &b_revs,
        &Rope::from("zpbj"),
        &Rope::from("a"),
        &parse_subset("-#---"),
    );
    println!("{:#?}", b_delta_ops);
    let expand_by = compute_transforms(a_revs);
    let (revs, text_2, tombstones_2, deletes_from_union_2) = rebase(
        expand_by,
        b_delta_ops,
        Rope::from("zcbd"),
        Rope::from("a"),
        parse_subset("-#---"),
        0,
    );
    // Collect the rebased insert subsets; an Undo revision would be a bug.
    let mut rebased_inserts = Vec::new();
    for rev in revs {
        match rev.edit {
            Contents::Edit { inserts, .. } => rebased_inserts.push(inserts),
            Contents::Undo { .. } => panic!(),
        }
    }
    debug_subsets(&rebased_inserts);
    let expected = parse_subset_list("
---#--
------#
");
    assert_eq!(expected, rebased_inserts);
    assert_eq!("zcpbdj", String::from(&text_2));
    assert_eq!("a", String::from(&tombstones_2));
    assert_eq!("-#-----", format!("{:#?}", deletes_from_union_2));
}
/// One scripted step in a multi-peer merge scenario (executed by
/// `MergeTestState::run_op`).
#[derive(Clone, Debug)]
enum MergeTestOp {
    /// Merge peer `.1`'s engine into peer `.0`'s engine.
    Merge(usize, usize),
    /// Assert that peer `.0`'s head text equals `.1`.
    Assert(usize, String),
    /// Assert that every peer's head text equals the given string.
    AssertAll(String),
    /// Assert that peer `.0`'s `max_undo_group_id()` equals `.1`.
    AssertMaxUndoSoFar(usize, usize),
    /// Apply delta `d` to peer `ei`'s head revision, with priority `p` and
    /// undo group `u`.
    Edit { ei: usize, p: usize, u: usize, d: Delta<RopeInfo> },
}
/// Holds one `Engine` per simulated peer for script-driven merge tests.
#[derive(Debug)]
struct MergeTestState {
    // Index in this vector is the peer number used by `MergeTestOp`.
    peers: Vec<Engine>,
}
impl MergeTestState {
fn new(count: usize) -> MergeTestState {
let mut peers = Vec::with_capacity(count);
for i in 0..count {
let mut peer = Engine::new(Rope::from(""));
peer.set_session_id(((i*1000) as u64, 0));
peers.push(peer);
}
MergeTestState { peers }
}
fn run_op(&mut self, op: &MergeTestOp) {
match *op {
MergeTestOp::Merge(ai, bi) => {
let (start, end) = self.peers.split_at_mut(ai);
let (a, rest) = end.split_first_mut().unwrap();
let b = if bi < ai {
&mut start[bi]
} else {
&mut rest[bi - ai - 1]
};
a.merge(b);
},
MergeTestOp::Assert(ei, ref correct) => {
let e = &mut self.peers[ei];
assert_eq!(correct, &String::from(e.get_head()), "for peer {}", ei);
},
MergeTestOp::AssertMaxUndoSoFar(ei, correct) => {
let e = &mut self.peers[ei];
assert_eq!(correct, e.max_undo_group_id(), "for peer {}", ei);
},
MergeTestOp::AssertAll(ref correct) => {
for (ei, e) in self.peers.iter().enumerate() {
assert_eq!(correct, &String::from(e.get_head()), "for peer {}", ei);
}
},
MergeTestOp::Edit { ei, p, u, d: ref delta } => {
let e = &mut self.peers[ei];
let head = e.get_head_rev_id().token();
e.edit_rev(p, u, head, delta.clone());
},
}
}
fn run_script(&mut self, script: &[MergeTestOp]) {
for (i, op) in script.iter().enumerate() {
println!("running {:?} at index {}", op, i);
self.run_op(op);
}
}
}
/// Whiteboard scenario with inserts only: three peers edit concurrently
/// around a shared base and must converge to the same interleaving after
/// merging.
#[test]
fn merge_insert_only_whiteboard() {
    use self::MergeTestOp::*;
    let script = vec![
        // Peer 2 seeds the shared text "ab" and syncs it to peers 0 and 1.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("ab") },
        Merge(0,2), Merge(1, 2),
        Assert(0, "ab".to_owned()),
        Assert(1, "ab".to_owned()),
        Assert(2, "ab".to_owned()),
        // Peer 0 inserts 'c' and 'd' locally.
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("-c-") },
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("---d") },
        Assert(0, "acbd".to_owned()),
        // Peer 1 concurrently inserts 'p' and 'j' at the same spots.
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("-p-") },
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("---j") },
        Assert(1, "apbj".to_owned()),
        // Peer 2 prepends 'z'; syncing it must not disturb local edits.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("z--") },
        Merge(0,2), Merge(1, 2),
        Assert(0, "zacbd".to_owned()),
        Assert(1, "zapbj".to_owned()),
        // Final merge interleaves both peers' concurrent inserts.
        Merge(0,1),
        Assert(0, "zacpbdj".to_owned()),
    ];
    MergeTestState::new(3).run_script(&script[..]);
}
/// Checks that concurrent inserts at the same position are ordered by edit
/// priority: the asserted strings show the p=3 insert ('c') landing before
/// the p=4 insert ('r'), which lands before the p=5 insert ('p').
#[test]
fn merge_priorities() {
    use self::MergeTestOp::*;
    let script = vec![
        // Shared base "ab" on all three peers.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("ab") },
        Merge(0,2), Merge(1, 2),
        Assert(0, "ab".to_owned()),
        Assert(1, "ab".to_owned()),
        Assert(2, "ab".to_owned()),
        // Peer 0 edits at priority 3, peer 1 at priority 5, peer 2 at 4.
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("-c-") },
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("---d") },
        Assert(0, "acbd".to_owned()),
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("-p-") },
        Assert(1, "apb".to_owned()),
        Edit { ei: 2, p: 4, u: 1, d: parse_delta("-r-") },
        // After merging, 'r' (p=4) sorts between 'c' (p=3) and 'p' (p=5).
        Merge(0,2), Merge(1, 2),
        Assert(0, "acrbd".to_owned()),
        Assert(1, "arpb".to_owned()),
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("----j") },
        Assert(1, "arpbj".to_owned()),
        Edit { ei: 2, p: 4, u: 1, d: parse_delta("---z") },
        Merge(0,2), Merge(1, 2),
        Assert(0, "acrbdz".to_owned()),
        Assert(1, "arpbzj".to_owned()),
        // Full convergence preserves the priority ordering at both sites.
        Merge(0,1),
        Assert(0, "acrpbdzj".to_owned()),
    ];
    MergeTestState::new(3).run_script(&script[..]);
}
/// Merging the same pair of peers repeatedly, in both directions, must not
/// change the already-converged text: merge is idempotent.
#[test]
fn merge_idempotent() {
    use self::MergeTestOp::*;
    let script = vec![
        // Shared base "ab".
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("ab") },
        Merge(0,2), Merge(1, 2),
        Assert(0, "ab".to_owned()),
        Assert(1, "ab".to_owned()),
        Assert(2, "ab".to_owned()),
        // Divergent concurrent edits on peers 0 and 1.
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("-c-") },
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("---d") },
        Assert(0, "acbd".to_owned()),
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("-p-") },
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("---j") },
        Merge(0,1),
        Assert(0, "acpbdj".to_owned()),
        // Redundant re-merges in both directions must be no-ops.
        Merge(0,1), Merge(1,0), Merge(0,1), Merge(1,0),
        Assert(0, "acpbdj".to_owned()),
        Assert(1, "acpbdj".to_owned()),
    ];
    MergeTestState::new(3).run_script(&script[..]);
}
/// Merges the same set of concurrent edits through two different merge
/// orders (and then through a scrambled order across six peers); every
/// order must converge to the same text.
#[test]
fn merge_associative() {
    use self::MergeTestOp::*;
    let script = vec![
        // Shared base "ab", then one concurrent edit on each of peers 0-2.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("ab") },
        Merge(0,2), Merge(1, 2),
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("-c-") },
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("-p-") },
        Edit { ei: 2, p: 2, u: 1, d: parse_delta("z--") },
        // Mirror the three states onto peers 3-5 before merging.
        Merge(3, 0), Merge(4, 1), Merge(5, 2),
        // Order one: ((0 <- 1) after (1 <- 2)).
        Merge(1,2),
        Merge(0,1),
        Assert(0, "zacpb".to_owned()),
        // Order two on the mirrored peers: ((5 <- 4) after (4 <- 3)).
        Merge(4,3),
        Merge(5,4),
        Assert(5, "zacpb".to_owned()),
        // Scrambled cross-merges must also converge everywhere.
        Merge(0,5), Merge(2,5), Merge(4,5), Merge(1,4),
        Merge(3,1), Merge(5,3),
        AssertAll("zacpb".to_owned()),
    ];
    MergeTestState::new(6).run_script(&script[..]);
}
/// Concurrent deletes and inserts: peer 0 deletes 'a' and inserts 'd' while
/// peer 1 inserts "efg" and deletes 'c'; the merge must keep both deletions
/// and both insertions ("bdefg"). The '!' in the delta notation marks a
/// deleted character (compare "abc" -> "bdc" below).
#[test]
fn merge_simple_delete_1() {
    use self::MergeTestOp::*;
    let script = vec![
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("abc") },
        Merge(1,0),
        Assert(0, "abc".to_owned()),
        Assert(1, "abc".to_owned()),
        // Peer 0: delete 'a', insert 'd' after 'b'.
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("!-d-") },
        Assert(0, "bdc".to_owned()),
        // Peer 1 concurrently: insert "efg" after 'b', delete 'c'.
        Edit { ei: 1, p: 3, u: 1, d: parse_delta("--efg!") },
        Assert(1, "abefg".to_owned()),
        Merge(1,0),
        Assert(1, "bdefg".to_owned()),
    ];
    MergeTestState::new(2).run_script(&script[..]);
}
/// An insert adjacent to a concurrently deleted character must survive the
/// merge: peer 1 inserts 'c' next to the 'a' that peer 0 deletes, and the
/// merged text keeps 'c' ("cb").
#[test]
fn merge_simple_delete_2() {
    use self::MergeTestOp::*;
    let script = vec![
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("ab") },
        Merge(1,0),
        Assert(0, "ab".to_owned()),
        Assert(1, "ab".to_owned()),
        // Peer 0 deletes 'a'.
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("!-") },
        Assert(0, "b".to_owned()),
        // Peer 1 concurrently inserts 'c' between 'a' and 'b'.
        Edit { ei: 1, p: 3, u: 1, d: parse_delta("-c-") },
        Assert(1, "acb".to_owned()),
        Merge(1,0),
        Assert(1, "cb".to_owned()),
    ];
    MergeTestState::new(2).run_script(&script[..]);
}
/// Four-peer whiteboard scenario mixing inserts and a delete: peer 2
/// deletes the shared 'a' while peers 0 and 1 insert around it; the
/// deletion propagates through every merge and all surviving inserts
/// interleave in the final text.
#[test]
fn merge_whiteboard() {
    use self::MergeTestOp::*;
    let script = vec![
        // Peer 2 seeds "ab" and syncs it to everyone.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("ab") },
        Merge(0,2), Merge(1, 2), Merge(3, 2),
        Assert(0, "ab".to_owned()),
        Assert(1, "ab".to_owned()),
        Assert(2, "ab".to_owned()),
        Assert(3, "ab".to_owned()),
        // Peer 2 deletes the shared 'a'.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("!-") },
        Assert(2, "b".to_owned()),
        // Peer 0 inserted around 'a' before seeing the delete.
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("-c-") },
        Edit { ei: 0, p: 3, u: 1, d: parse_delta("---d") },
        Assert(0, "acbd".to_owned()),
        Merge(0,2),
        Assert(0, "cbd".to_owned()),
        // Peer 1 does the same with its own inserts.
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("-p-") },
        Merge(1,2),
        Assert(1, "pb".to_owned()),
        Edit { ei: 1, p: 5, u: 1, d: parse_delta("--j") },
        Assert(1, "pbj".to_owned()),
        // Peer 3 prepends 'z', routed through peer 2 to the others.
        Edit { ei: 3, p: 7, u: 1, d: parse_delta("z--") },
        Merge(2,3),
        Merge(0,2), Merge(1, 2),
        Assert(0, "zcbd".to_owned()),
        Assert(1, "zpbj".to_owned()),
        // Final convergence of peers 0 and 1.
        Merge(0,1),
        Assert(0, "zcpbdj".to_owned()),
    ];
    MergeTestState::new(4).run_script(&script[..]);
}
/// Checks that `max_undo_group_id()` tracks the highest undo group a peer
/// has seen, including groups that arrive via merge, and never decreases
/// when merging in a peer that only knows older groups.
#[test]
fn merge_max_undo_so_far() {
    use self::MergeTestOp::*;
    let script = vec![
        // Base edit in undo group 1, synced to peers 1 and 2.
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("ab") },
        Merge(1,0), Merge(2,0),
        AssertMaxUndoSoFar(1,1),
        // Peer 0 uses group 2, peer 1 uses group 3.
        Edit { ei: 0, p: 1, u: 2, d: parse_delta("!-") },
        Edit { ei: 1, p: 3, u: 3, d: parse_delta("-!") },
        // Merging peer 0 into 1 must not lower peer 1's max (stays 3).
        Merge(1,0),
        AssertMaxUndoSoFar(1,3),
        AssertMaxUndoSoFar(0,2),
        // Merging peer 1 into 0 raises peer 0's max to 3.
        Merge(0,1),
        AssertMaxUndoSoFar(0,3),
        // Peer 2 only knows group 1; merging it in must not lower the max.
        Edit { ei: 2, p: 1, u: 1, d: parse_delta("!!") },
        Merge(1,2),
        AssertMaxUndoSoFar(1,3),
    ];
    MergeTestState::new(3).run_script(&script[..]);
}
/// Concurrent inserts with EQUAL edit priority must still be ordered
/// consistently on every peer — the peers' distinct session ids act as the
/// tie-breaker, so all peers converge on "adfc" regardless of merge path.
#[test]
fn merge_session_priorities() {
    use self::MergeTestOp::*;
    let script = vec![
        // Shared base "ac" on all three peers.
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("ac") },
        Merge(1,0),
        Merge(2,0),
        AssertAll("ac".to_owned()),
        // Peers 0 and 1 insert at the same spot with the same priority (1).
        Edit { ei: 0, p: 1, u: 1, d: parse_delta("-d-") },
        Assert(0, "adc".to_owned()),
        Edit { ei: 1, p: 1, u: 1, d: parse_delta("-f-") },
        Merge(2,1),
        Assert(1, "afc".to_owned()),
        Assert(2, "afc".to_owned()),
        // Both merge paths must produce the same deterministic ordering.
        Merge(2,0),
        Merge(0,1),
        Assert(2, "adfc".to_owned()),
        Assert(0, "adfc".to_owned()),
    ];
    MergeTestState::new(3).run_script(&script[..]);
}
}