use std::cmp::Ordering;
use std::collections::{BTreeSet, HashSet};
use std::env;
use std::fmt::Debug;
use std::num::NonZeroU64;
use std::ops::RangeBounds;
use itertools::Itertools;
pub(crate) use crate::op_set2::change::ChangeCollector;
pub(crate) use crate::op_set2::types::ScalarValue;
pub(crate) use crate::op_set2::{
ChangeMetadata, KeyRef, OpQuery, OpQueryTerm, OpSet, OpType, Parents,
};
pub(crate) use crate::read::ReadDoc;
use crate::change_graph::ChangeGraph;
use crate::cursor::{CursorPosition, MoveCursor, OpCursor};
use crate::exid::ExId;
use crate::iter::{DiffIter, DocIter, Keys, ListRange, MapRange, Spans, Values};
use crate::marks::{Mark, MarkAccumulator, MarkSet};
use crate::patches::{Patch, PatchLog};
use crate::storage::{self, change, load, Bundle, CompressConfig, Document, VerificationMode};
use crate::transaction::{
self, CommitOptions, Failure, Success, Transactable, Transaction, TransactionArgs,
};
use crate::clock::{Clock, ClockRange};
use crate::hydrate;
use crate::types::{ActorId, ChangeHash, ObjId, ObjMeta, OpId, SequenceType, TextEncoding, Value};
use crate::{AutomergeError, Change, Cursor, ObjType, Prop};
pub(crate) mod current_state;
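/// Tracks whether the document's actor id has been interned into the op set yet:
/// `Unused` holds the actor id itself, `Cached` holds its index in the op set's
/// actor table.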
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum Actor {
Unused(ActorId),
Cached(usize),
}
impl Actor {
fn remove_actor(&mut self, index: usize, actors: &[ActorId]) {
if let Actor::Cached(idx) = self {
match (*idx).cmp(&index) {
Ordering::Equal => *self = Actor::Unused(actors[index].clone()),
Ordering::Greater => *idx -= 1,
Ordering::Less => (),
}
}
}
fn rewrite_with_new_actor(&mut self, index: usize) {
if let Actor::Cached(idx) = self {
if *idx >= index {
*idx += 1;
}
}
}
}
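/// What to do when loading a document only partially succeeds: ignore the error and
/// keep the changes which did load, or fail the whole load.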
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OnPartialLoad {
Ignore,
Error,
}
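/// Whether to convert `ScalarValue::Str` values in maps and lists into `Text` objects
/// when loading a document.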
#[derive(Debug)]
pub enum StringMigration {
NoMigration,
ConvertToText,
}
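/// Options controlling how a document is loaded, used with [`Automerge::load_with_options`].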
#[derive(Debug)]
pub struct LoadOptions<'a> {
on_partial_load: OnPartialLoad,
verification_mode: VerificationMode,
string_migration: StringMigration,
patch_log: Option<&'a mut PatchLog>,
text_encoding: TextEncoding,
}
impl<'a> LoadOptions<'a> {
pub fn new() -> LoadOptions<'static> {
LoadOptions::default()
}
pub fn on_partial_load(self, on_partial_load: OnPartialLoad) -> Self {
Self {
on_partial_load,
..self
}
}
pub fn verification_mode(self, verification_mode: VerificationMode) -> Self {
Self {
verification_mode,
..self
}
}
pub fn patch_log(self, patch_log: &'a mut PatchLog) -> Self {
Self {
patch_log: Some(patch_log),
..self
}
}
pub fn migrate_strings(self, migration: StringMigration) -> Self {
Self {
string_migration: migration,
..self
}
}
pub fn text_encoding(self, text_encoding: TextEncoding) -> Self {
Self {
text_encoding,
..self
}
}
}
impl std::default::Default for LoadOptions<'static> {
fn default() -> Self {
Self {
on_partial_load: OnPartialLoad::Error,
verification_mode: VerificationMode::Check,
patch_log: None,
string_migration: StringMigration::NoMigration,
text_encoding: TextEncoding::platform_default(),
}
}
}
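/// An automerge document, comprised of the set of operations (`ops`), the graph of
/// changes which produced them (`change_graph`), any changes whose dependencies have
/// not yet arrived (`queue`), and the actor id used when creating new changes.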
#[derive(Debug, Clone)]
pub struct Automerge {
pub(crate) queue: Vec<Change>,
pub(crate) change_graph: ChangeGraph,
deps: HashSet<ChangeHash>,
pub(crate) ops: OpSet,
actor: Actor,
max_op: u64,
}
impl Automerge {
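/// Create a new, empty document with a random actor id and the platform default text encoding.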
pub fn new() -> Self {
Automerge {
queue: vec![],
change_graph: ChangeGraph::new(0),
ops: OpSet::new(TextEncoding::platform_default()),
deps: Default::default(),
actor: Actor::Unused(ActorId::random()),
max_op: 0,
}
}
pub fn new_with_encoding(encoding: TextEncoding) -> Self {
Automerge {
queue: vec![],
change_graph: ChangeGraph::new(0),
ops: OpSet::new(encoding),
deps: Default::default(),
actor: Actor::Unused(ActorId::random()),
max_op: 0,
}
}
pub(crate) fn ops_mut(&mut self) -> &mut OpSet {
&mut self.ops
}
pub(crate) fn ops(&self) -> &OpSet {
&self.ops
}
pub(crate) fn changes(&self) -> &ChangeGraph {
&self.change_graph
}
pub fn is_empty(&self) -> bool {
self.change_graph.is_empty() && self.queue.is_empty()
}
pub(crate) fn actor_id(&self) -> &ActorId {
match &self.actor {
Actor::Unused(id) => id,
Actor::Cached(idx) => self.ops.get_actor(*idx),
}
}
pub fn with_actor(mut self, actor: ActorId) -> Self {
self.set_actor(actor);
self
}
pub fn set_actor(&mut self, actor: ActorId) -> &mut Self {
match self.ops.actors.binary_search(&actor) {
Ok(idx) => self.actor = Actor::Cached(idx),
Err(_) => self.actor = Actor::Unused(actor),
}
self
}
pub fn get_actor(&self) -> &ActorId {
match &self.actor {
Actor::Unused(actor) => actor,
Actor::Cached(index) => self.ops.get_actor(*index),
}
}
pub(crate) fn remove_actor(&mut self, actor: usize) {
self.actor.remove_actor(actor, &self.ops.actors);
self.ops.remove_actor(actor);
self.change_graph.remove_actor(actor);
}
pub(crate) fn assert_no_unused_actors(&self, panic: bool) {
if self.ops.actors.len() != self.change_graph.actor_ids().count() {
let unused = self.change_graph.unused_actors().collect::<Vec<_>>();
log!("AUTOMERGE :: unused actor found when none expected");
log!(" :: ops={}", self.ops.actors.len());
log!(" :: graph={}", self.change_graph.all_actor_ids().count());
log!(" :: unused={:?}", unused);
log!(" :: actors={:?}", self.ops.actors);
assert!(!panic);
}
}
pub(crate) fn remove_unused_actors(&mut self, panic: bool) {
if panic {
self.assert_no_unused_actors(cfg!(debug_assertions));
}
while let Some(idx) = self.change_graph.unused_actors().last() {
self.remove_actor(idx);
}
}
fn get_or_create_actor_index(&mut self) -> usize {
match &self.actor {
Actor::Unused(actor) => {
let index = self.put_actor(actor.clone());
self.actor = Actor::Cached(index);
index
}
Actor::Cached(index) => *index,
}
}
fn get_actor_index(&self) -> Option<usize> {
match &self.actor {
Actor::Unused(_) => None,
Actor::Cached(index) => Some(*index),
}
}
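/// Start a transaction which does not log patches. Use
/// [`Self::transaction_log_patches`] to record patches as the transaction runs.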
pub fn transaction(&mut self) -> Transaction<'_> {
let args = self.transaction_args(None);
Transaction::new(self, args, PatchLog::inactive())
}
pub fn transaction_log_patches(&mut self, patch_log: PatchLog) -> Transaction<'_> {
let args = self.transaction_args(None);
Transaction::new(self, args, patch_log)
}
pub fn transaction_at(&mut self, patch_log: PatchLog, heads: &[ChangeHash]) -> Transaction<'_> {
let args = self.transaction_args(Some(heads));
Transaction::new(self, args, patch_log)
}
pub(crate) fn transaction_args(&mut self, heads: Option<&[ChangeHash]>) -> TransactionArgs {
let actor_index;
let seq;
let mut deps;
let scope;
match heads {
Some(heads) => {
deps = heads.to_vec();
let isolation = self.isolate_actor(heads);
actor_index = isolation.actor_index;
seq = isolation.seq;
scope = Some(isolation.clock);
}
None => {
actor_index = self.get_or_create_actor_index();
seq = self.change_graph.seq_for_actor(actor_index) + 1;
deps = self.get_heads();
scope = None;
if seq > 1 {
let last_hash = self.get_hash(actor_index, seq - 1).unwrap();
if !deps.contains(&last_hash) {
deps.push(last_hash);
}
}
}
}
let start_op = NonZeroU64::new(self.max_op + 1).unwrap();
let checkpoint = self.ops.save_checkpoint();
TransactionArgs {
actor_index,
seq,
start_op,
deps,
checkpoint,
scope,
}
}
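/// Run a closure inside a transaction. If the closure returns `Ok` the transaction is
/// committed; if it returns `Err` the transaction is rolled back and the error is
/// returned in a [`Failure`].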
pub fn transact<F, O, E>(&mut self, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
{
self.transact_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f)
}
pub fn transact_with<F, O, E, C>(&mut self, c: C, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
C: FnOnce(&O) -> CommitOptions,
{
self.transact_with_impl(Some(c), f)
}
fn transact_with_impl<F, O, E, C>(&mut self, c: Option<C>, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
C: FnOnce(&O) -> CommitOptions,
{
let mut tx = self.transaction();
let result = f(&mut tx);
match result {
Ok(result) => {
let (hash, patch_log) = if let Some(c) = c {
let commit_options = c(&result);
tx.commit_with(commit_options)
} else {
tx.commit()
};
Ok(Success {
result,
hash,
patch_log,
})
}
Err(error) => Err(Failure {
error,
cancelled: tx.rollback(),
}),
}
}
pub fn transact_and_log_patches<F, O, E>(&mut self, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
{
self.transact_and_log_patches_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f)
}
pub fn transact_and_log_patches_with<F, O, E, C>(
&mut self,
c: C,
f: F,
) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
C: FnOnce(&O) -> CommitOptions,
{
self.transact_and_log_patches_with_impl(Some(c), f)
}
fn transact_and_log_patches_with_impl<F, O, E, C>(
&mut self,
c: Option<C>,
f: F,
) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
C: FnOnce(&O) -> CommitOptions,
{
let mut tx = self.transaction_log_patches(PatchLog::active());
let result = f(&mut tx);
match result {
Ok(result) => {
let (hash, history) = if let Some(c) = c {
let commit_options = c(&result);
tx.commit_with(commit_options)
} else {
tx.commit()
};
Ok(Success {
result,
hash,
patch_log: history,
})
}
Err(error) => Err(Failure {
error,
cancelled: tx.rollback(),
}),
}
}
pub fn empty_commit(&mut self, opts: CommitOptions) -> ChangeHash {
let args = self.transaction_args(None);
Transaction::empty(self, args, opts)
}
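/// Fork this document, producing a copy with the same contents but a new random actor id.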
pub fn fork(&self) -> Self {
let mut f = self.clone();
f.set_actor(ActorId::random());
f
}
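/// Fork this document as it was at the given heads, returning an error if any of the
/// heads are not present in this document.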
pub fn fork_at(&self, heads: &[ChangeHash]) -> Result<Self, AutomergeError> {
let mut seen = heads.iter().cloned().collect::<HashSet<_>>();
let mut heads = heads.to_vec();
let mut hashes = vec![];
while let Some(hash) = heads.pop() {
if !self.change_graph.has_change(&hash) {
return Err(AutomergeError::InvalidHash(hash));
}
for dep in self.change_graph.deps_for_hash(&hash) {
if !seen.contains(&dep) {
heads.push(dep);
}
}
hashes.push(hash);
seen.insert(hash);
}
let mut f = Self::new();
f.set_actor(ActorId::random());
let changes = self.get_changes_by_hashes(hashes.into_iter().rev().collect())?;
f.apply_changes(changes)?;
Ok(f)
}
pub(crate) fn get_changes_by_hashes(
&self,
hashes: Vec<ChangeHash>,
) -> Result<Vec<Change>, AutomergeError> {
ChangeCollector::for_hashes(&self.ops, &self.change_graph, hashes.clone())
}
pub(crate) fn exid_to_opid(&self, id: &ExId) -> Result<OpId, AutomergeError> {
match id {
ExId::Root => Ok(OpId::new(0, 0)),
ExId::Id(ctr, actor, idx) => {
let opid = if self.ops.get_actor_safe(*idx) == Some(actor) {
OpId::new(*ctr, *idx)
} else if let Some(backup_idx) = self.ops.lookup_actor(actor) {
OpId::new(*ctr, backup_idx)
} else {
return Err(AutomergeError::InvalidObjId(id.to_string()));
};
Ok(opid)
}
}
}
pub(crate) fn get_obj_meta(&self, id: ObjId) -> Result<ObjMeta, AutomergeError> {
if id.is_root() {
Ok(ObjMeta::root())
} else if let Some(typ) = self.ops.object_type(&id) {
Ok(ObjMeta { id, typ })
} else {
Err(AutomergeError::NotAnObject)
}
}
pub(crate) fn op_cursor_to_opid(
&self,
cursor: &OpCursor,
clock: Option<&Clock>,
) -> Result<OpId, AutomergeError> {
if let Some(idx) = self.ops.lookup_actor(&cursor.actor) {
let opid = OpId::new(cursor.ctr, idx);
match clock {
Some(clock) if !clock.covers(&opid) => {
Err(AutomergeError::InvalidCursor(Cursor::Op(cursor.clone())))
}
_ => Ok(opid),
}
} else {
Err(AutomergeError::InvalidCursor(Cursor::Op(cursor.clone())))
}
}
pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<ObjMeta, AutomergeError> {
let opid = self.exid_to_opid(id)?;
let obj = ObjId(opid);
self.get_obj_meta(obj)
}
pub(crate) fn id_to_exid(&self, id: OpId) -> ExId {
self.ops.id_to_exid(id)
}
pub fn diff_opset(&self, other: &Self) -> Result<(), AutomergeError> {
let (ops_meta1, ops_out1) = self.ops.export();
let (ops_meta2, ops_out2) = other.ops.export();
if ops_meta1 != ops_meta2 {
let specs: BTreeSet<_> = ops_meta1
.0
.iter()
.chain(ops_meta2.0.iter())
.map(|c| c.spec())
.collect();
for s in specs {
let d1 = ops_meta1
.0
.iter()
.find(|c| c.spec() == s)
.map(|c| c.data())
.unwrap_or(0..0);
let d2 = ops_meta2
.0
.iter()
.find(|c| c.spec() == s)
.map(|c| c.data())
.unwrap_or(0..0);
let d1 = &ops_out1[d1];
let d2 = &ops_out2[d2];
if d1 != d2 {
log!(" s={:?}|{:?} ", s.id(), s.col_type());
log!(" {:?} ", d1);
log!(" {:?} ", d2);
OpSet::decode(s, d1);
OpSet::decode(s, d2);
}
}
}
Ok(())
}
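/// Load a document from its binary representation using the default [`LoadOptions`].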
pub fn load(data: &[u8]) -> Result<Self, AutomergeError> {
Self::load_with_options(data, Default::default())
}
pub fn load_unverified_heads(data: &[u8]) -> Result<Self, AutomergeError> {
Self::load_with_options(
data,
LoadOptions {
verification_mode: VerificationMode::DontCheck,
..Default::default()
},
)
}
#[deprecated(since = "0.5.2", note = "Use `load_with_options` instead")]
#[tracing::instrument(skip(data), err)]
pub fn load_with(
data: &[u8],
on_error: OnPartialLoad,
mode: VerificationMode,
patch_log: &mut PatchLog,
) -> Result<Self, AutomergeError> {
Self::load_with_options(
data,
LoadOptions::new()
.on_partial_load(on_error)
.verification_mode(mode)
.patch_log(patch_log),
)
}
#[tracing::instrument(skip(data), err)]
pub fn load_with_options(
data: &[u8],
options: LoadOptions<'_>,
) -> Result<Self, AutomergeError> {
if data.is_empty() {
tracing::trace!("no data, initializing empty document");
return Ok(Self::new());
}
tracing::trace!("loading first chunk");
let (remaining, first_chunk) = storage::Chunk::parse(storage::parse::Input::new(data))
.map_err(|e| load::Error::Parse(Box::new(e)))?;
if !first_chunk.checksum_valid() {
return Err(load::Error::BadChecksum.into());
}
let mut changes = vec![];
let mut first_chunk_was_doc = false;
let mut am = match first_chunk {
storage::Chunk::Document(d) => {
tracing::trace!("first chunk is document chunk, inflating");
first_chunk_was_doc = true;
reconstruct_document(&d, options.verification_mode, options.text_encoding)?
}
storage::Chunk::Change(stored_change) => {
tracing::trace!("first chunk is change chunk");
changes.push(
Change::new_from_unverified(stored_change.into_owned(), None)
.map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?,
);
Self::new()
}
storage::Chunk::Bundle(bundle) => {
tracing::trace!("first chunk is change chunk");
let bundle = Bundle::new_from_unverified(bundle.into_owned())
.map_err(|e| load::Error::InvalidBundleColumn(Box::new(e)))?;
let bundle_changes = bundle
.to_changes()
.map_err(|e| load::Error::InvalidBundleChange(Box::new(e)))?;
changes.extend(bundle_changes);
Self::new()
}
storage::Chunk::CompressedChange(stored_change, compressed) => {
tracing::trace!("first chunk is compressed change");
changes.push(
Change::new_from_unverified(
stored_change.into_owned(),
Some(compressed.into_owned()),
)
.map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?,
);
Self::new()
}
};
tracing::trace!("loading change chunks");
match load::load_changes(remaining.reset(), options.text_encoding, &am.change_graph) {
load::LoadedChanges::Complete(c) => {
am.apply_changes(changes.into_iter().chain(c))?;
if !am.queue.is_empty()
&& !first_chunk_was_doc
&& options.on_partial_load == OnPartialLoad::Error
{
return Err(AutomergeError::MissingDeps);
}
}
load::LoadedChanges::Partial { error, .. } => {
if options.on_partial_load == OnPartialLoad::Error {
return Err(error.into());
}
}
}
if let StringMigration::ConvertToText = options.string_migration {
am.convert_scalar_strings_to_text()?;
}
if let Some(patch_log) = options.patch_log {
if patch_log.is_active() {
am.log_current_state(patch_log);
}
}
Ok(am)
}
pub fn make_patches(&self, patch_log: &mut PatchLog) -> Vec<Patch> {
patch_log.make_patches(self)
}
pub fn current_state(&self) -> Vec<Patch> {
let mut patch_log = PatchLog::active();
self.log_current_state(&mut patch_log);
patch_log.make_patches(self)
}
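/// Load data (a document or a sequence of changes) into this document, returning the
/// number of ops added. Patches are not logged; use
/// [`Self::load_incremental_log_patches`] to record them.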
pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> {
self.load_incremental_log_patches(data, &mut PatchLog::inactive())
}
pub fn load_incremental_log_patches(
&mut self,
data: &[u8],
patch_log: &mut PatchLog,
) -> Result<usize, AutomergeError> {
if self.is_empty() {
let mut doc = Self::load_with_options(
data,
LoadOptions::new()
.on_partial_load(OnPartialLoad::Ignore)
.verification_mode(VerificationMode::Check),
)?;
doc = doc.with_actor(self.actor_id().clone());
if patch_log.is_active() {
doc.log_current_state(patch_log);
}
*self = doc;
return Ok(self.ops.len());
}
let changes = match load::load_changes(
storage::parse::Input::new(data),
self.text_encoding(),
&self.change_graph,
) {
load::LoadedChanges::Complete(c) => c,
load::LoadedChanges::Partial { error, loaded, .. } => {
tracing::warn!(successful_chunks=loaded.len(), err=?error, "partial load");
loaded
}
};
let start = self.ops.len();
self.apply_changes_log_patches(changes, patch_log)?;
let delta = self.ops.len() - start;
Ok(delta)
}
pub(crate) fn log_current_state(&self, patch_log: &mut PatchLog) {
let clock = ClockRange::default();
let path_map = DiffIter::log(self, ObjMeta::root(), clock, patch_log);
patch_log.path_hint(path_map);
}
fn seq_for_actor(&self, actor: &ActorId) -> u64 {
self.ops
.lookup_actor(actor)
.map(|idx| self.change_graph.seq_for_actor(idx))
.unwrap_or(0)
}
pub(crate) fn has_actor_seq(&self, change: &Change) -> bool {
self.seq_for_actor(change.actor_id()) >= change.seq()
}
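/// Apply changes to this document without logging patches.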
pub fn apply_changes(
&mut self,
changes: impl IntoIterator<Item = Change> + Clone,
) -> Result<(), AutomergeError> {
self.apply_changes_log_patches(changes, &mut PatchLog::inactive())
}
pub fn apply_changes_log_patches<I: IntoIterator<Item = Change> + Clone>(
&mut self,
changes: I,
patch_log: &mut PatchLog,
) -> Result<(), AutomergeError> {
self.apply_changes_batch_log_patches(changes, patch_log)
}
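/// Apply all changes from `other` which are missing from this document, returning the
/// resulting heads of this document.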
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ChangeHash>, AutomergeError> {
self.merge_and_log_patches(other, &mut PatchLog::inactive())
}
pub fn merge_and_log_patches(
&mut self,
other: &mut Self,
patch_log: &mut PatchLog,
) -> Result<Vec<ChangeHash>, AutomergeError> {
let changes = self.get_changes_added(other);
tracing::trace!(changes=?changes.iter().map(|c| c.hash()).collect::<Vec<_>>(), "merging new changes");
self.apply_changes_log_patches(changes, patch_log)?;
Ok(self.get_heads())
}
pub fn bundle<I>(&self, hashes: I) -> Result<Bundle, AutomergeError>
where
I: IntoIterator<Item = ChangeHash>,
{
Bundle::for_hashes(&self.ops, &self.change_graph, hashes)
}
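/// Serialize the document to the binary document format, with compression and
/// retention of orphaned (queued) changes controlled by `options`.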
pub fn save_with_options(&self, options: SaveOptions) -> Vec<u8> {
self.assert_no_unused_actors(true);
let doc = Document::new(&self.ops, &self.change_graph, options.compress());
let mut bytes = doc.into_bytes();
if options.retain_orphans {
for orphaned in self.queue.iter() {
bytes.extend(orphaned.raw_bytes());
}
}
bytes
}
#[cfg(test)]
pub fn debug_cmp(&self, other: &Self) {
self.ops.debug_cmp(&other.ops);
}
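/// Serialize the document to the binary document format using the default [`SaveOptions`].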
pub fn save(&self) -> Vec<u8> {
self.save_with_options(SaveOptions::default())
}
pub fn save_and_verify(&self) -> Result<Vec<u8>, AutomergeError> {
let bytes = self.save();
Self::load(&bytes)?;
Ok(bytes)
}
pub fn save_nocompress(&self) -> Vec<u8> {
self.save_with_options(SaveOptions {
deflate: false,
..Default::default()
})
}
pub fn save_after(&self, heads: &[ChangeHash]) -> Vec<u8> {
let changes = self.get_changes(heads);
let mut bytes = vec![];
for c in changes {
bytes.extend(c.raw_bytes());
}
bytes
}
pub(crate) fn filter_changes(
&self,
heads: &[ChangeHash],
changes: &mut BTreeSet<ChangeHash>,
) -> Result<(), AutomergeError> {
let heads = heads
.iter()
.filter(|hash| self.has_change(hash))
.copied()
.collect::<Vec<_>>();
self.change_graph.remove_ancestors(changes, &heads);
Ok(())
}
pub fn get_last_local_change(&self) -> Option<Change> {
let actor = self.get_actor_index()?;
let seq = self.change_graph.seq_for_actor(actor);
let hash = self.change_graph.get_hash_for_actor_seq(actor, seq).ok()?;
self.get_change_by_hash(&hash)
}
pub(crate) fn clock_range(&self, before: &[ChangeHash], after: &[ChangeHash]) -> ClockRange {
let before = self.clock_at(before);
let after = self.clock_at(after);
ClockRange::Diff(before, after)
}
pub(crate) fn clock_at(&self, heads: &[ChangeHash]) -> Clock {
self.change_graph.clock_for_heads(heads)
}
fn get_isolated_actor_index(&mut self, level: usize) -> usize {
if level == 0 {
self.get_or_create_actor_index()
} else {
let base_actor = self.get_actor();
let new_actor = base_actor.with_concurrency(level);
self.put_actor(new_actor)
}
}
pub(crate) fn isolate_actor(&mut self, heads: &[ChangeHash]) -> Isolation {
let mut actor_index = self.get_isolated_actor_index(0);
let mut clock = self.clock_at(heads);
for i in 1.. {
let max_op = self.change_graph.max_op_for_actor(actor_index);
if max_op == 0 || clock.covers(&OpId::new(max_op, actor_index)) {
clock.isolate(actor_index);
break;
}
actor_index = self.get_isolated_actor_index(i);
clock = self.clock_at(heads);
}
let seq = self.change_graph.seq_for_actor(actor_index) + 1;
Isolation {
actor_index,
seq,
clock,
}
}
fn get_hash(&self, actor: usize, seq: u64) -> Result<ChangeHash, AutomergeError> {
self.change_graph.get_hash_for_actor_seq(actor, seq)
}
pub(crate) fn update_history(&mut self, change: &Change, num_ops: usize) {
self.max_op = std::cmp::max(self.max_op, change.start_op().get() + num_ops as u64 - 1);
self.update_deps(change);
let actor_index = self
.ops
.actors
.binary_search(change.actor_id())
.expect("Change's actor not already in the document");
self.change_graph
.add_change(change, actor_index)
.expect("Change's deps should already be in the document");
}
fn insert_actor(&mut self, index: usize, actor: ActorId) -> usize {
self.ops.insert_actor(index, actor);
self.change_graph.insert_actor(index);
self.actor.rewrite_with_new_actor(index);
index
}
pub(crate) fn put_actor_ref(&mut self, actor: &ActorId) -> usize {
match self.ops.actors.binary_search(actor) {
Ok(idx) => idx,
Err(idx) => self.insert_actor(idx, actor.clone()),
}
}
pub(crate) fn put_actor(&mut self, actor: ActorId) -> usize {
match self.ops.actors.binary_search(&actor) {
Ok(idx) => idx,
Err(idx) => self.insert_actor(idx, actor),
}
}
fn update_deps(&mut self, change: &Change) {
for d in change.deps() {
self.deps.remove(d);
}
self.deps.insert(change.hash());
}
#[doc(hidden)]
pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> {
let obj = self.import_obj(s)?;
if obj == ExId::Root {
Ok((ExId::Root, ObjType::Map))
} else {
let obj_type = self
.object_type(&obj)
.map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?;
Ok((obj, obj_type))
}
}
#[doc(hidden)]
pub fn import_obj(&self, s: &str) -> Result<ExId, AutomergeError> {
if s == "_root" {
Ok(ExId::Root)
} else {
let n = s
.find('@')
.ok_or_else(|| AutomergeError::InvalidObjIdFormat(s.to_owned()))?;
let counter = s[0..n]
.parse()
.map_err(|_| AutomergeError::InvalidObjIdFormat(s.to_owned()))?;
let actor = ActorId::from(
hex::decode(&s[(n + 1)..])
.map_err(|_| AutomergeError::InvalidObjIdFormat(s.to_owned()))?,
);
let actor = self
.ops
.lookup_actor(&actor)
.ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?;
let obj = ExId::Id(counter, self.ops.get_actor(actor).clone(), actor);
Ok(obj)
}
}
pub fn dump(&self) {
self.ops.dump();
}
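/// Compute the patches which transform the state of the document at `before_heads`
/// into the state at `after_heads`.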
pub fn diff(&self, before_heads: &[ChangeHash], after_heads: &[ChangeHash]) -> Vec<Patch> {
let clock = self.clock_range(before_heads, after_heads);
let mut patch_log = PatchLog::active();
DiffIter::log(self, ObjMeta::root(), clock, &mut patch_log);
patch_log.heads = Some(after_heads.to_vec());
patch_log.make_patches(self)
}
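/// Get the heads of this document: the hashes of changes which no other change
/// depends on.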
pub fn get_heads(&self) -> Vec<ChangeHash> {
let mut deps: Vec<_> = self.deps.iter().copied().collect();
deps.sort_unstable();
deps
}
pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<Change> {
ChangeCollector::exclude_hashes(&self.ops, &self.change_graph, have_deps)
}
pub fn get_changes_meta(&self, have_deps: &[ChangeHash]) -> Vec<ChangeMetadata<'_>> {
ChangeCollector::exclude_hashes_meta(&self.ops, &self.change_graph, have_deps)
}
pub fn get_change_meta_by_hash(&self, hash: &ChangeHash) -> Option<ChangeMetadata<'_>> {
ChangeCollector::meta_for_hashes(&self.ops, &self.change_graph, [*hash])
.ok()?
.pop()
}
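/// Get the changes which are present in `other` but not in this document, in
/// dependency order.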
pub fn get_changes_added(&self, other: &Self) -> Vec<Change> {
let mut stack: Vec<_> = other.get_heads();
tracing::trace!(their_heads=?stack, "finding changes to merge");
let mut seen_hashes = HashSet::new();
let mut added_change_hashes = Vec::new();
while let Some(hash) = stack.pop() {
if !seen_hashes.contains(&hash) && !self.has_change(&hash) {
seen_hashes.insert(hash);
added_change_hashes.push(hash);
stack.extend(other.change_graph.deps_for_hash(&hash));
}
}
added_change_hashes.reverse();
other.get_changes_by_hashes(added_change_hashes).unwrap()
}
pub fn hash_for_opid(&self, exid: &ExId) -> Option<ChangeHash> {
match exid {
ExId::Root => None,
ExId::Id(..) => {
let opid = self.exid_to_opid(exid).ok()?;
self.change_graph.opid_to_hash(opid)
}
}
}
fn calculate_marks(
&self,
obj: &ExId,
clock: Option<Clock>,
) -> Result<Vec<Mark>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let mut top_ops = self
.ops()
.iter_obj(&obj.id)
.visible_slow(clock)
.top_ops()
.marks();
let Some(seq_type) = obj.typ.as_sequence_type() else {
return Ok(Vec::new());
};
let mut index = 0;
let mut acc = MarkAccumulator::default();
let mut last_marks = None;
let mut mark_len = 0;
let mut mark_index = 0;
while let Some(o) = top_ops.next() {
let marks = top_ops.get_marks();
let len = o.width(seq_type, self.text_encoding());
if last_marks.as_ref() != marks {
match last_marks.as_ref() {
Some(m) if mark_len > 0 => acc.add(mark_index, mark_len, m),
_ => (),
}
last_marks = marks.cloned();
mark_index = index;
mark_len = 0;
}
mark_len += len;
index += len;
}
match last_marks.as_ref() {
Some(m) if mark_len > 0 => acc.add(mark_index, mark_len, m),
_ => (),
}
Ok(acc.into_iter_no_unmark().collect())
}
pub fn hydrate(&self, heads: Option<&[ChangeHash]>) -> hydrate::Value {
let clock = heads.map(|heads| self.clock_at(heads));
self.hydrate_map(&ObjId::root(), clock.as_ref())
}
pub(crate) fn hydrate_obj(
&self,
obj: &crate::ObjId,
heads: Option<&[ChangeHash]>,
) -> Result<hydrate::Value, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
let clock = heads.map(|heads| self.clock_at(heads));
Ok(match obj.typ {
ObjType::Map | ObjType::Table => self.hydrate_map(&obj.id, clock.as_ref()),
ObjType::List => self.hydrate_list(&obj.id, clock.as_ref()),
ObjType::Text => self.hydrate_text(&obj.id, clock.as_ref()),
})
}
pub(crate) fn parents_for(
&self,
obj: &ExId,
clock: Option<Clock>,
) -> Result<Parents<'_>, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
Ok(self.ops.parents(obj.id, clock))
}
pub(crate) fn keys_for(&self, obj: &ExId, clock: Option<Clock>) -> Keys<'_> {
self.exid_to_obj(obj)
.ok()
.map(|obj| self.ops.keys(&obj.id, clock))
.unwrap_or_default()
}
pub(crate) fn iter_for(&self, obj: &ExId, clock: Option<Clock>) -> DocIter<'_> {
self.exid_to_obj(obj)
.ok()
.map(|obj| DocIter::new(self, obj, clock))
.unwrap_or_else(|| DocIter::empty(self.text_encoding()))
}
pub(crate) fn map_range_for<'a, R: RangeBounds<String> + 'a>(
&'a self,
obj: &ExId,
range: R,
clock: Option<Clock>,
) -> MapRange<'a> {
self.exid_to_obj(obj)
.ok()
.map(|obj| self.ops.map_range(&obj.id, range, clock))
.unwrap_or_default()
}
pub(crate) fn list_range_for<R: RangeBounds<usize>>(
&self,
obj: &ExId,
range: R,
clock: Option<Clock>,
) -> ListRange<'_> {
self.exid_to_obj(obj)
.ok()
.map(|obj| self.ops.list_range(&obj.id, range, clock))
.unwrap_or_default()
}
pub(crate) fn values_for(&self, obj: &ExId, clock: Option<Clock>) -> Values<'_> {
self.exid_to_obj(obj)
.ok()
.map(|obj| Values::new(&self.ops, self.ops.top_ops(&obj.id, clock.clone()), clock))
.unwrap_or_default()
}
pub(crate) fn length_for(&self, obj: &ExId, clock: Option<Clock>) -> usize {
self.exid_to_obj(obj)
.map(|obj| self.ops.seq_length(&obj.id, self.text_encoding(), clock))
.unwrap_or(0)
}
pub(crate) fn text_for(
&self,
obj: &ExId,
clock: Option<Clock>,
) -> Result<String, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
Ok(self.ops.text(&obj.id, clock))
}
pub(crate) fn spans_for(
&self,
obj: &ExId,
clock: Option<Clock>,
) -> Result<Spans<'_>, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
Ok(Spans::new(self.ops.spans(&obj.id, clock)))
}
pub(crate) fn get_cursor_for(
&self,
obj: &ExId,
position: CursorPosition,
clock: Option<Clock>,
move_cursor: MoveCursor,
) -> Result<Cursor, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
let Some(seq_type) = obj.typ.as_sequence_type() else {
return Err(AutomergeError::InvalidOp(obj.typ));
};
match position {
CursorPosition::Start => Ok(Cursor::Start),
CursorPosition::End => Ok(Cursor::End),
CursorPosition::Index(i) => {
let found = self
.ops
.seek_ops_by_index(&obj.id, i, seq_type, clock.as_ref());
if let Some(op) = found.ops.last() {
Ok(Cursor::Op(OpCursor::new(op.id, &self.ops, move_cursor)))
} else {
Err(AutomergeError::InvalidIndex(i))
}
}
}
}
pub(crate) fn get_cursor_position_for(
&self,
obj: &ExId,
cursor: &Cursor,
clock: Option<Clock>,
) -> Result<usize, AutomergeError> {
match cursor {
Cursor::Start => Ok(0),
Cursor::End => Ok(self.length_for(obj, clock)),
Cursor::Op(op) => {
let obj_meta = self.exid_to_obj(obj)?;
let Some(seq_type) = obj_meta.typ.as_sequence_type() else {
return Err(AutomergeError::InvalidCursor(cursor.clone()));
};
let opid = self.op_cursor_to_opid(op, clock.as_ref())?;
let found = self
.ops
.seek_list_opid(&obj_meta.id, opid, seq_type, clock.as_ref())
.ok_or_else(|| AutomergeError::InvalidCursor(cursor.clone()))?;
match op.move_cursor {
MoveCursor::After => Ok(found.index),
MoveCursor::Before => {
if found.visible || found.index == 0 {
Ok(found.index)
} else {
let mut key = found
.op
.key
.elemid()
.expect("failed to retrieve initial cursor op key for MoveCursor::Before")
.0;
loop {
let f = self.ops.seek_list_opid(
&obj_meta.id,
key,
seq_type,
clock.as_ref(),
);
match f {
Some(f) => {
if f.visible {
return Ok(f.index);
}
key = f
.op
.key
.elemid()
.expect(
"failed to retrieve op key in MoveCursor::Before",
)
.0;
}
None => break Ok(0),
}
}
}
}
}
}
}
}
pub(crate) fn marks_for(
&self,
obj: &ExId,
clock: Option<Clock>,
) -> Result<Vec<Mark>, AutomergeError> {
self.calculate_marks(obj, clock)
}
pub(crate) fn get_for(
&self,
obj: &ExId,
prop: Prop,
clock: Option<Clock>,
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
let obj = self.exid_to_obj(obj)?;
let op = match (obj.typ, prop) {
(ObjType::Map | ObjType::Table, Prop::Map(key)) => self
.ops
.seek_ops_by_map_key(&obj.id, &key, clock.as_ref())
.ops
.into_iter()
.next_back()
.map(|op| op.tagged_value(self.ops())),
(ObjType::List | ObjType::Text, Prop::Seq(i)) => {
let seq_type = obj
.typ
.as_sequence_type()
.expect("list and text must have a sequence type");
self.ops
.seek_ops_by_index(&obj.id, i, seq_type, clock.as_ref())
.ops
.into_iter()
.next_back()
.map(|op| op.tagged_value(self.ops()))
}
_ => return Err(AutomergeError::InvalidOp(obj.typ)),
};
Ok(op)
}
pub(crate) fn get_all_for<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
clock: Option<Clock>,
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
let prop = prop.into();
let obj = self.exid_to_obj(obj.as_ref())?;
let values = match (obj.typ, prop) {
(ObjType::Map | ObjType::Table, Prop::Map(key)) => self
.ops
.seek_ops_by_map_key(&obj.id, &key, clock.as_ref())
.ops
.into_iter()
.map(|op| op.tagged_value(self.ops()))
.collect::<Vec<_>>(),
(ObjType::List | ObjType::Text, Prop::Seq(i)) => {
let seq_type = obj
.typ
.as_sequence_type()
.expect("list and text must have a sequence type");
self.ops
.seek_ops_by_index(&obj.id, i, seq_type, clock.as_ref())
.ops
.into_iter()
.map(|op| op.tagged_value(self.ops()))
.collect::<Vec<_>>()
}
_ => return Err(AutomergeError::InvalidOp(obj.typ)),
};
debug_assert_eq!(
values.iter().map(|v| &v.1).collect::<Vec<_>>(),
values.iter().map(|v| &v.1).sorted().collect::<Vec<_>>()
);
Ok(values)
}
pub(crate) fn get_marks_for<O: AsRef<ExId>>(
&self,
obj: O,
index: usize,
clock: Option<Clock>,
) -> Result<MarkSet, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let mut iter = self
.ops
.iter_obj(&obj.id)
.visible_slow(clock)
.top_ops()
.marks();
iter.nth(index);
match iter.get_marks() {
Some(arc) => Ok(arc.as_ref().clone().without_unmarks()),
None => Ok(MarkSet::default()),
}
}
fn convert_scalar_strings_to_text(&mut self) -> Result<(), AutomergeError> {
struct Conversion {
obj_id: ExId,
prop: Prop,
text: smol_str::SmolStr,
}
let mut to_convert = Vec::new();
for (obj, ops) in self.ops.iter_objs() {
match obj.typ {
ObjType::Map | ObjType::List => {
for op in ops.visible_slow(None) {
if let OpType::Put(ScalarValue::Str(s)) = op.op_type() {
let prop = match op.key {
KeyRef::Map(prop) => Prop::Map(prop.into()),
KeyRef::Seq(_) => {
let Some(found) = self.ops.seek_list_opid(
&obj.id,
op.id,
SequenceType::List,
None,
) else {
continue;
};
Prop::Seq(found.index)
}
};
to_convert.push(Conversion {
obj_id: self.ops.id_to_exid(obj.id.0),
prop,
text: smol_str::SmolStr::from(s),
})
}
}
}
_ => {}
}
}
if !to_convert.is_empty() {
let mut tx = self.transaction();
for Conversion { obj_id, prop, text } in to_convert {
let text_id = tx.put_object(obj_id, prop, ObjType::Text)?;
tx.splice_text(&text_id, 0, 0, &text)?;
}
tx.commit();
}
Ok(())
}
pub fn has_our_changes(&self, other: &crate::sync::State) -> bool {
other.shared_heads == self.get_heads()
}
pub(crate) fn has_change(&self, head: &ChangeHash) -> bool {
self.change_graph.has_change(head)
}
pub fn text_encoding(&self) -> TextEncoding {
self.ops.text_encoding
}
}
impl ReadDoc for Automerge {
fn parents<O: AsRef<ExId>>(&self, obj: O) -> Result<Parents<'_>, AutomergeError> {
self.parents_for(obj.as_ref(), None)
}
fn parents_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Parents<'_>, AutomergeError> {
let clock = self.clock_at(heads);
self.parents_for(obj.as_ref(), Some(clock))
}
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_> {
self.keys_for(obj.as_ref(), None)
}
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Keys<'_> {
let clock = self.clock_at(heads);
self.keys_for(obj.as_ref(), Some(clock))
}
fn iter_at<O: AsRef<ExId>>(&self, obj: O, heads: Option<&[ChangeHash]>) -> DocIter<'_> {
let clock = heads.map(|heads| self.clock_at(heads));
self.iter_for(obj.as_ref(), clock)
}
fn map_range<'a, O: AsRef<ExId>, R: RangeBounds<String> + 'a>(
&'a self,
obj: O,
range: R,
) -> MapRange<'a> {
self.map_range_for(obj.as_ref(), range, None)
}
fn map_range_at<'a, O: AsRef<ExId>, R: RangeBounds<String> + 'a>(
&'a self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRange<'a> {
let clock = self.clock_at(heads);
self.map_range_for(obj.as_ref(), range, Some(clock))
}
fn list_range<O: AsRef<ExId>, R: RangeBounds<usize>>(&self, obj: O, range: R) -> ListRange<'_> {
self.list_range_for(obj.as_ref(), range, None)
}
fn list_range_at<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> ListRange<'_> {
let clock = self.clock_at(heads);
self.list_range_for(obj.as_ref(), range, Some(clock))
}
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
self.values_for(obj.as_ref(), None)
}
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> {
let clock = self.clock_at(heads);
self.values_for(obj.as_ref(), Some(clock))
}
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
self.length_for(obj.as_ref(), None)
}
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
let clock = self.clock_at(heads);
self.length_for(obj.as_ref(), Some(clock))
}
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
self.text_for(obj.as_ref(), None)
}
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Spans<'_>, AutomergeError> {
self.spans_for(obj.as_ref(), None)
}
fn spans_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Spans<'_>, AutomergeError> {
let clock = self.clock_at(heads);
self.spans_for(obj.as_ref(), Some(clock))
}
fn get_cursor<O: AsRef<ExId>, I: Into<CursorPosition>>(
&self,
obj: O,
position: I,
at: Option<&[ChangeHash]>,
) -> Result<Cursor, AutomergeError> {
let clock = at.map(|heads| self.clock_at(heads));
self.get_cursor_for(obj.as_ref(), position.into(), clock, MoveCursor::After)
}
fn get_cursor_moving<O: AsRef<ExId>, I: Into<CursorPosition>>(
&self,
obj: O,
position: I,
at: Option<&[ChangeHash]>,
move_cursor: MoveCursor,
) -> Result<Cursor, AutomergeError> {
let clock = at.map(|heads| self.clock_at(heads));
self.get_cursor_for(obj.as_ref(), position.into(), clock, move_cursor)
}
fn get_cursor_position<O: AsRef<ExId>>(
&self,
obj: O,
cursor: &Cursor,
at: Option<&[ChangeHash]>,
) -> Result<usize, AutomergeError> {
let clock = at.map(|heads| self.clock_at(heads));
self.get_cursor_position_for(obj.as_ref(), cursor, clock)
}
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError> {
let clock = self.clock_at(heads);
self.text_for(obj.as_ref(), Some(clock))
}
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark>, AutomergeError> {
self.marks_for(obj.as_ref(), None)
}
fn marks_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<Mark>, AutomergeError> {
let clock = self.clock_at(heads);
self.marks_for(obj.as_ref(), Some(clock))
}
fn hydrate<O: AsRef<ExId>>(
&self,
obj: O,
heads: Option<&[ChangeHash]>,
) -> Result<hydrate::Value, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let clock = heads.map(|h| self.clock_at(h));
Ok(match obj.typ {
ObjType::List => self.hydrate_list(&obj.id, clock.as_ref()),
ObjType::Text => self.hydrate_text(&obj.id, clock.as_ref()),
_ => self.hydrate_map(&obj.id, clock.as_ref()),
})
}
fn get_marks<O: AsRef<ExId>>(
&self,
obj: O,
index: usize,
heads: Option<&[ChangeHash]>,
) -> Result<MarkSet, AutomergeError> {
let clock = heads.map(|h| self.clock_at(h));
self.get_marks_for(obj.as_ref(), index, clock)
}
fn get<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
self.get_for(obj.as_ref(), prop.into(), None)
}
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
let clock = Some(self.clock_at(heads));
self.get_for(obj.as_ref(), prop.into(), clock)
}
fn get_all<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
self.get_all_for(obj.as_ref(), prop.into(), None)
}
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
let clock = Some(self.clock_at(heads));
self.get_all_for(obj.as_ref(), prop.into(), clock)
}
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError> {
let obj = obj.as_ref();
let opid = self.exid_to_opid(obj)?;
let typ = self.ops.object_type(&ObjId(opid));
typ.ok_or_else(|| AutomergeError::InvalidObjId(obj.to_string()))
}
#[inline(never)]
fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<ChangeHash> {
let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect();
let mut missing = HashSet::new();
for dep in self.queue.iter().flat_map(|change| change.deps()) {
if !self.has_change(dep) {
missing.insert(dep);
}
}
for head in heads {
if !self.has_change(head) {
missing.insert(head);
}
}
let mut missing = missing
.into_iter()
.filter(|hash| !in_queue.contains(hash))
.copied()
.collect::<Vec<_>>();
missing.sort();
missing
}
fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<Change> {
ChangeCollector::for_hashes(&self.ops, &self.change_graph, [*hash])
.ok()?
.pop()
}
fn stats(&self) -> crate::read::Stats {
let num_changes = self.change_graph.len() as u64;
let num_ops = self.ops.len() as u64;
let num_actors = self.ops.actors.len() as u64;
let cargo_package_name = env!("CARGO_PKG_NAME");
let cargo_package_version = env!("CARGO_PKG_VERSION");
let rustc_version = env!("CARGO_PKG_RUST_VERSION");
crate::read::Stats {
num_changes,
num_ops,
num_actors,
cargo_package_name,
cargo_package_version,
rustc_version,
}
}
fn text_encoding(&self) -> TextEncoding {
self.ops.text_encoding
}
}
impl Default for Automerge {
fn default() -> Self {
Self::new()
}
}
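/// Options for [`Automerge::save_with_options`]: `deflate` controls whether change
/// data above a size threshold is compressed, and `retain_orphans` controls whether
/// queued changes with missing dependencies are appended to the output.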
#[derive(Debug)]
pub struct SaveOptions {
pub deflate: bool,
pub retain_orphans: bool,
}
impl SaveOptions {
fn compress(&self) -> CompressConfig {
if self.deflate {
CompressConfig::Threshold(change::DEFLATE_MIN_SIZE)
} else {
CompressConfig::None
}
}
}
impl std::default::Default for SaveOptions {
fn default() -> Self {
Self {
deflate: true,
retain_orphans: true,
}
}
}
#[derive(Debug)]
pub(crate) struct Isolation {
actor_index: usize,
seq: u64,
clock: Clock,
}
pub(crate) fn reconstruct_document<'a>(
doc: &'a storage::Document<'a>,
mode: VerificationMode,
text_encoding: TextEncoding,
) -> Result<Automerge, AutomergeError> {
let storage::load::ReconOpSet {
op_set,
heads,
max_op,
change_graph,
..
} = storage::load::reconstruct_opset(doc, mode, text_encoding)
.map_err(|e| load::Error::InflateDocument(Box::new(e)))?;
let mut doc = Automerge {
queue: vec![],
change_graph,
ops: op_set,
deps: heads.into_iter().collect(),
actor: Actor::Unused(ActorId::random()),
max_op,
};
doc.remove_unused_actors(false);
Ok(doc)
}