use std::collections::{BinaryHeap, HashSet, VecDeque};
use std::sync::Arc;
use panproto_gat::{
NaturalTransformation, Sort, Term, Theory, TheoryMorphism, check_natural_transformation,
};
use panproto_mig::Migration;
use panproto_schema::Schema;
use crate::error::VcsError;
use crate::hash::ObjectId;
use crate::object::{CommitObject, Object};
use crate::store::Store;
/// The outcome of composing migrations along a commit path, together with any
/// coherence warnings produced while validating the composition.
#[derive(Clone, Debug)]
#[must_use]
pub struct CompositionResult {
    /// The migration obtained by composing every per-commit migration on the path.
    pub migration: Migration,
    /// Human-readable diagnostics; empty when no coherence issue was detected.
    pub coherence_warnings: Vec<String>,
}
impl Default for CompositionResult {
fn default() -> Self {
Self {
migration: Migration::empty(),
coherence_warnings: Vec::new(),
}
}
}
/// Finds the "merge base" (best common ancestor) of commits `a` and `b`.
///
/// Returns `Ok(None)` when the two commits share no history. When several
/// lowest common ancestors exist (criss-cross histories), the one with the
/// largest timestamp is chosen, with `ObjectId` ordering as a deterministic
/// tie-break.
///
/// # Errors
/// Propagates store failures encountered while walking either ancestry.
pub fn merge_base(
    store: &dyn Store,
    a: ObjectId,
    b: ObjectId,
) -> Result<Option<ObjectId>, VcsError> {
    if a == b {
        return Ok(Some(a));
    }
    // `all_ancestors` includes the starting commit itself, so the case where
    // one head is a direct ancestor of the other is covered by intersection.
    let ancestors_a = all_ancestors(store, a)?;
    let ancestors_b = all_ancestors(store, b)?;
    let common: HashSet<ObjectId> = ancestors_a.intersection(&ancestors_b).copied().collect();
    if common.is_empty() {
        return Ok(None);
    }
    // Keep only the "lowest" common ancestors: drop any candidate that is a
    // proper ancestor of another common ancestor (i.e. is dominated by it).
    let lcas: Vec<ObjectId> = common
        .iter()
        .filter(|&&c| {
            !common
                .iter()
                .any(|&d| d != c && ancestors_of_contains(store, d, c))
        })
        .copied()
        .collect();
    // NOTE(review): a store failure during this timestamp lookup is silently
    // treated as timestamp 0 — confirm this best-effort behavior is intended.
    Ok(lcas.into_iter().max_by(|x, y| {
        let tx = get_commit(store, *x).map_or(0, |c| c.timestamp);
        let ty = get_commit(store, *y).map_or(0, |c| c.timestamp);
        tx.cmp(&ty).then_with(|| x.cmp(y))
    }))
}
/// Collects every commit reachable from `start` via parent links,
/// including `start` itself.
fn all_ancestors(store: &dyn Store, start: ObjectId) -> Result<HashSet<ObjectId>, VcsError> {
    let mut seen: HashSet<ObjectId> = HashSet::from([start]);
    let mut pending: VecDeque<ObjectId> = VecDeque::from([start]);
    // Breadth-first walk over the commit DAG.
    while let Some(id) = pending.pop_front() {
        for &parent in &get_commit(store, id)?.parents {
            if seen.insert(parent) {
                pending.push_back(parent);
            }
        }
    }
    Ok(seen)
}
/// Returns `true` when `ancestor` is a *proper* ancestor of `descendant`
/// (a commit is not considered its own ancestor here).
///
/// Best-effort: store lookup failures are treated as "not found" rather than
/// propagated, matching the original behavior used by `merge_base`'s filter.
fn ancestors_of_contains(store: &dyn Store, descendant: ObjectId, ancestor: ObjectId) -> bool {
    let mut visited = HashSet::new();
    // Seed the BFS with the descendant itself; the previous version duplicated
    // the whole loop body just to process the first commit.
    let mut queue = VecDeque::from([descendant]);
    while let Some(current) = queue.pop_front() {
        if let Ok(commit) = get_commit(store, current) {
            for &parent in &commit.parents {
                if parent == ancestor {
                    return true;
                }
                if visited.insert(parent) {
                    queue.push_back(parent);
                }
            }
        }
    }
    false
}
/// Finds a commit path from `from` to `to` (inclusive at both ends), where
/// each step follows a parent edge in reverse — i.e. `from` must be an
/// ancestor of `to` (or equal to it).
///
/// # Errors
/// Returns [`VcsError::NoPath`] when `from` is not reachable, and propagates
/// store failures while walking.
pub fn find_path(
    store: &dyn Store,
    from: ObjectId,
    to: ObjectId,
) -> Result<Vec<ObjectId>, VcsError> {
    if from == to {
        return Ok(vec![from]);
    }
    // BFS backwards from `to` across parent links. `successor` records, for
    // each discovered commit, the child through which it was first reached.
    let mut successor: HashMap<ObjectId, ObjectId> = HashMap::new();
    let mut frontier: VecDeque<ObjectId> = VecDeque::new();
    frontier.push_back(to);
    successor.insert(to, to);
    while let Some(current) = frontier.pop_front() {
        for &parent in &get_commit(store, current)?.parents {
            if successor.contains_key(&parent) {
                continue;
            }
            successor.insert(parent, current);
            if parent == from {
                // Found it: walk the successor chain back up to `to`.
                let mut path = vec![from];
                let mut node = from;
                while node != to {
                    node = successor[&node];
                    path.push(node);
                }
                return Ok(path);
            }
            frontier.push_back(parent);
        }
    }
    Err(VcsError::NoPath)
}
use std::collections::HashMap;
/// Walks history from `start`, yielding commits in descending timestamp
/// order (ties broken by `ObjectId`), stopping after `limit` commits when a
/// limit is given.
///
/// # Errors
/// Fails when any commit that must be emitted (or queued) cannot be loaded.
pub fn log_walk(
    store: &dyn Store,
    start: ObjectId,
    limit: Option<usize>,
) -> Result<Vec<CommitObject>, VcsError> {
    let mut result = Vec::new();
    let mut visited: HashSet<ObjectId> = HashSet::new();
    // Max-heap keyed on (timestamp, id): the newest discovered commit is
    // always emitted next.
    let mut heap: BinaryHeap<(u64, ObjectId)> = BinaryHeap::new();
    let first = get_commit(store, start)?;
    heap.push((first.timestamp, start));
    visited.insert(start);
    while let Some((_, commit_id)) = heap.pop() {
        let commit = get_commit(store, commit_id)?;
        // If this commit fills the limit there is no point fetching and
        // queueing its parents — they could never be emitted. (The previous
        // version fetched them anyway before breaking.)
        let is_last = limit.is_some_and(|n| result.len() + 1 >= n);
        if !is_last {
            for &parent in &commit.parents {
                if visited.insert(parent) {
                    let parent_commit = get_commit(store, parent)?;
                    heap.push((parent_commit.timestamp, parent));
                }
            }
        }
        result.push(commit);
        if is_last {
            break;
        }
    }
    Ok(result)
}
/// Composes the per-commit migrations along `path` into a single migration.
///
/// `path[0]` is the starting commit, so the first migration considered is the
/// one recorded on `path[1]`. Paths with fewer than two commits compose to
/// the empty migration.
///
/// # Errors
/// Fails when a commit or its migration cannot be loaded, or when two
/// adjacent migrations do not compose.
pub fn compose_path(store: &dyn Store, path: &[ObjectId]) -> Result<Migration, VcsError> {
    if path.len() < 2 {
        return Ok(Migration::empty());
    }
    let mut acc = get_migration(store, get_commit(store, path[1])?.migration_id)?;
    for &commit_id in &path[2..] {
        let commit = get_commit(store, commit_id)?;
        let step = get_migration(store, commit.migration_id)?;
        acc = panproto_mig::compose(&acc, &step)?;
    }
    Ok(acc)
}
/// Composes the migrations along `path` (see [`compose_path`]) and checks the
/// result for coherence against the endpoint schemas.
///
/// Warnings — never hard errors — are produced when:
/// - the composed migration maps a vertex to one missing from the target schema,
/// - a source-schema vertex is left unmapped by the composition,
/// - for paths of three or more commits, the composition diverges from the
///   migration derived directly between the endpoint schemas (drift).
///
/// # Errors
/// Propagates store lookup failures and composition errors.
pub fn compose_path_with_coherence(
    store: &dyn Store,
    path: &[ObjectId],
) -> Result<CompositionResult, VcsError> {
    let composed = compose_path(store, path)?;
    // A path with fewer than two commits composes to the empty migration;
    // there is nothing to check.
    if path.len() < 2 {
        return Ok(CompositionResult {
            migration: composed,
            coherence_warnings: Vec::new(),
        });
    }
    let mut warnings = Vec::new();
    let first_commit = get_commit(store, path[0])?;
    let last_commit = get_commit(store, path[path.len() - 1])?;
    let source_schema = get_schema(store, first_commit.schema_id)?;
    let target_schema = get_schema(store, last_commit.schema_id)?;
    // Lift both endpoint schemas to theories so the composed migration can be
    // treated as a theory morphism between them.
    let source_theory = theory_from_schema(&source_schema, "Source");
    let target_theory = theory_from_schema(&target_schema, "Target");
    let composed_morphism =
        theory_morphism_from_migration(&composed, "composed", "Source", "Target");
    // Structural check: every image vertex must exist in the target schema.
    for (src_v, tgt_v) in &composed.vertex_map {
        if !target_schema.vertices.contains_key(tgt_v) {
            warnings.push(format!(
                "composed migration maps vertex '{src_v}' to '{tgt_v}' \
                 which does not exist in target schema",
            ));
        }
    }
    // Totality check: every source vertex should be mapped somewhere.
    for sort in &source_theory.sorts {
        if !composed_morphism.sort_map.contains_key(&sort.name) {
            warnings.push(format!(
                "composed migration does not map source vertex '{}'",
                sort.name,
            ));
        }
    }
    // With at least one intermediate commit, the composition can drift from
    // the migration derived directly between the endpoints; compare them.
    if path.len() >= 3 {
        let schema_diff = panproto_check::diff::diff(&source_schema, &target_schema);
        let direct_mig =
            crate::auto_mig::derive_migration(&source_schema, &target_schema, &schema_diff);
        check_drift(
            &composed_morphism,
            &direct_mig,
            &source_theory,
            &target_theory,
            &mut warnings,
        );
    }
    Ok(CompositionResult {
        migration: composed,
        coherence_warnings: warnings,
    })
}
/// Compares the composed morphism against the directly-derived migration and
/// appends human-readable warnings for every divergence found.
///
/// When the two sort maps fully agree, an identity-shaped natural
/// transformation between the morphisms is additionally checked so that
/// deeper (equation-level) disagreements still surface as warnings.
fn check_drift(
    composed_morphism: &TheoryMorphism,
    direct_mig: &Migration,
    source_theory: &Theory,
    target_theory: &Theory,
    warnings: &mut Vec<String>,
) {
    let direct_morphism = theory_morphism_from_migration(direct_mig, "direct", "Source", "Target");
    let mut nt_components = std::collections::HashMap::new();
    let mut divergent_sorts = Vec::new();
    for sort in &source_theory.sorts {
        let composed_img = composed_morphism.sort_map.get(&sort.name);
        let direct_img = direct_morphism.sort_map.get(&sort.name);
        match (composed_img, direct_img) {
            // Both present and equal: no drift for this sort.
            (Some(c), Some(d)) if c == d => {}
            (Some(c), Some(d)) => {
                divergent_sorts.push((sort.name.to_string(), c.to_string(), d.to_string()));
            }
            (None, Some(_)) => {
                warnings.push(format!(
                    "direct migration maps '{}' but composed migration does not",
                    sort.name,
                ));
            }
            (Some(_), None) => {
                warnings.push(format!(
                    "composed migration maps '{}' but direct migration does not",
                    sort.name,
                ));
            }
            (None, None) => {}
        }
        // Identity component candidate for the naturality check below.
        nt_components.insert(Arc::clone(&sort.name), Term::var("x"));
    }
    for (sort, composed_tgt, direct_tgt) in &divergent_sorts {
        warnings.push(format!(
            "composition drift: vertex '{sort}' maps to '{composed_tgt}' via composition \
             but '{direct_tgt}' via direct migration",
        ));
    }
    // The naturality check is only meaningful when the sort maps agree.
    if divergent_sorts.is_empty() {
        let nt = NaturalTransformation {
            name: Arc::from("composed_vs_direct"),
            source: Arc::from("composed"),
            target: Arc::from("direct"),
            components: nt_components,
        };
        if let Err(e) = check_natural_transformation(
            &nt,
            composed_morphism,
            &direct_morphism,
            source_theory,
            target_theory,
        ) {
            warnings.push(format!("naturality check failed: {e}"));
        }
    }
}
/// Builds a bare theory whose sorts are the schema's vertex ids; no
/// operations or equations are carried over.
fn theory_from_schema(schema: &Schema, name: &str) -> Theory {
    let mut sorts = Vec::with_capacity(schema.vertices.len());
    for vertex_id in schema.vertices.keys() {
        sorts.push(Sort::simple(&*vertex_id.0));
    }
    Theory::new(name, sorts, Vec::new(), Vec::new())
}
/// Views a migration's vertex map as a theory morphism's sort map; the
/// operation map is left empty.
fn theory_morphism_from_migration(
    migration: &Migration,
    name: &str,
    domain: &str,
    codomain: &str,
) -> TheoryMorphism {
    let mut sort_map: std::collections::HashMap<Arc<str>, Arc<str>> =
        std::collections::HashMap::with_capacity(migration.vertex_map.len());
    for (src, tgt) in &migration.vertex_map {
        sort_map.insert(Arc::from(&*src.0), Arc::from(&*tgt.0));
    }
    TheoryMorphism::new(
        name,
        domain,
        codomain,
        sort_map,
        std::collections::HashMap::new(),
    )
}
/// Assembles the schema stored under tree object `id` using the project's
/// coproduct protocol.
fn get_schema(store: &dyn Store, id: ObjectId) -> Result<Schema, VcsError> {
    let protocol = crate::tree::project_coproduct_protocol();
    crate::tree::assemble_schema_dyn(store, &id, &protocol)
}
/// Returns whether `ancestor` is reachable from `descendant` via parent
/// links. A commit is considered its own ancestor.
///
/// # Errors
/// Propagates store failures encountered while walking.
pub fn is_ancestor(
    store: &dyn Store,
    ancestor: ObjectId,
    descendant: ObjectId,
) -> Result<bool, VcsError> {
    if ancestor == descendant {
        return Ok(true);
    }
    // Breadth-first walk upwards from the descendant.
    let mut seen: HashSet<ObjectId> = HashSet::from([descendant]);
    let mut pending: VecDeque<ObjectId> = VecDeque::from([descendant]);
    while let Some(id) = pending.pop_front() {
        for &parent in &get_commit(store, id)?.parents {
            if parent == ancestor {
                return Ok(true);
            }
            if seen.insert(parent) {
                pending.push_back(parent);
            }
        }
    }
    Ok(false)
}
/// Number of parent edges on the path from `from` to `to` (0 when equal).
///
/// # Errors
/// Fails when no path exists or a commit cannot be loaded.
pub fn commit_count(store: &dyn Store, from: ObjectId, to: ObjectId) -> Result<usize, VcsError> {
    find_path(store, from, to).map(|path| path.len().saturating_sub(1))
}
/// Fetches `id` from the store, insisting that it is a commit object.
fn get_commit(store: &dyn Store, id: ObjectId) -> Result<CommitObject, VcsError> {
    match store.get(&id)? {
        Object::Commit(commit) => Ok(commit),
        unexpected => Err(VcsError::WrongObjectType {
            expected: "commit",
            found: unexpected.type_name(),
        }),
    }
}
/// Fetches the migration object behind an optional commit `migration_id`.
///
/// NOTE(review): `VcsError::NoPath` is reused here for "commit carries no
/// migration id" — a dedicated variant may be clearer; confirm intent.
fn get_migration(store: &dyn Store, id: Option<ObjectId>) -> Result<Migration, VcsError> {
    let migration_id = id.ok_or(VcsError::NoPath)?;
    match store.get(&migration_id)? {
        Object::Migration { mapping, .. } => Ok(mapping),
        unexpected => Err(VcsError::WrongObjectType {
            expected: "migration",
            found: unexpected.type_name(),
        }),
    }
}
// Tests cover the graph helpers (merge base, path finding, log walk,
// ancestry) against hand-built in-memory histories, plus the coherence
// warnings produced when composing migration chains.
#[cfg(test)]
#[allow(clippy::cast_possible_truncation)]
mod tests {
    use super::*;
    use crate::{MemStore, Store};

    /// Builds a linear chain of `n` commits (c0 <- c1 <- ... <- c(n-1)) with
    /// timestamps 0, 100, 200, ...; returns the store and commit ids in order.
    fn build_linear_history(
        n: usize,
    ) -> Result<(MemStore, Vec<ObjectId>), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        let mut ids = Vec::new();
        for i in 0..n {
            let parents = if i == 0 { vec![] } else { vec![ids[i - 1]] };
            let commit = CommitObject::builder(
                ObjectId::from_bytes([i as u8; 32]),
                "test",
                "test",
                format!("commit {i}"),
            )
            .parents(parents)
            .timestamp(i as u64 * 100)
            .build();
            let id = store.put(&Object::Commit(commit))?;
            ids.push(id);
        }
        Ok((store, ids))
    }

    /// Diamond: c0 <- {c1, c2} <- c3, where c3 merges both branches.
    fn build_diamond_history() -> Result<(MemStore, Vec<ObjectId>), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        let c0 = CommitObject::builder(ObjectId::from_bytes([0; 32]), "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(ObjectId::from_bytes([1; 32]), "test", "test", "c1")
            .parents(vec![id0])
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let c2 = CommitObject::builder(ObjectId::from_bytes([2; 32]), "test", "test", "c2")
            .parents(vec![id0])
            .timestamp(300)
            .build();
        let id2 = store.put(&Object::Commit(c2))?;
        let c3 = CommitObject::builder(ObjectId::from_bytes([3; 32]), "test", "test", "c3")
            .parents(vec![id1, id2])
            .timestamp(400)
            .build();
        let id3 = store.put(&Object::Commit(c3))?;
        Ok((store, vec![id0, id1, id2, id3]))
    }

    #[test]
    fn merge_base_same_commit() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(3)?;
        assert_eq!(merge_base(&store, ids[1], ids[1])?, Some(ids[1]));
        Ok(())
    }

    #[test]
    fn merge_base_linear() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(5)?;
        // One head is an ancestor of the other: the base is the ancestor itself.
        assert_eq!(merge_base(&store, ids[4], ids[2])?, Some(ids[2]));
        Ok(())
    }

    #[test]
    fn merge_base_diamond() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_diamond_history()?;
        assert_eq!(merge_base(&store, ids[1], ids[2])?, Some(ids[0]));
        Ok(())
    }

    #[test]
    fn merge_base_disjoint() -> Result<(), Box<dyn std::error::Error>> {
        // Two root commits with no shared history: no merge base exists.
        let mut store = MemStore::new();
        let c1 = CommitObject::builder(ObjectId::from_bytes([1; 32]), "test", "test", "orphan1")
            .timestamp(100)
            .build();
        let c2 = CommitObject::builder(ObjectId::from_bytes([2; 32]), "test", "test", "orphan2")
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let id2 = store.put(&Object::Commit(c2))?;
        assert_eq!(merge_base(&store, id1, id2)?, None);
        Ok(())
    }

    #[test]
    fn find_path_linear() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(4)?;
        let path = find_path(&store, ids[0], ids[3])?;
        assert_eq!(path, vec![ids[0], ids[1], ids[2], ids[3]]);
        Ok(())
    }

    #[test]
    fn find_path_same() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(1)?;
        let path = find_path(&store, ids[0], ids[0])?;
        assert_eq!(path, vec![ids[0]]);
        Ok(())
    }

    #[test]
    fn log_walk_linear() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(3)?;
        let log = log_walk(&store, ids[2], None)?;
        // Newest first (descending timestamp).
        assert_eq!(log.len(), 3);
        assert_eq!(log[0].message, "commit 2");
        assert_eq!(log[1].message, "commit 1");
        assert_eq!(log[2].message, "commit 0");
        Ok(())
    }

    #[test]
    fn log_walk_with_limit() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(5)?;
        let log = log_walk(&store, ids[4], Some(2))?;
        assert_eq!(log.len(), 2);
        Ok(())
    }

    #[test]
    fn log_walk_diamond() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_diamond_history()?;
        let log = log_walk(&store, ids[3], None)?;
        assert_eq!(log.len(), 4);
        Ok(())
    }

    #[test]
    fn is_ancestor_true() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(4)?;
        assert!(is_ancestor(&store, ids[0], ids[3])?);
        Ok(())
    }

    #[test]
    fn is_ancestor_false() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(4)?;
        // Ancestry is directional: a child is not an ancestor of its parent.
        assert!(!is_ancestor(&store, ids[3], ids[0])?);
        Ok(())
    }

    #[test]
    fn is_ancestor_self() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(1)?;
        assert!(is_ancestor(&store, ids[0], ids[0])?);
        Ok(())
    }

    #[test]
    fn commit_count_linear() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_linear_history(5)?;
        assert_eq!(commit_count(&store, ids[0], ids[4])?, 4);
        Ok(())
    }

    /// Criss-cross: c3 merges (c1, c2) and c4 merges (c2, c1), so both c1 and
    /// c2 are lowest common ancestors of c3 and c4.
    fn build_criss_cross_history() -> Result<(MemStore, Vec<ObjectId>), Box<dyn std::error::Error>>
    {
        let mut store = MemStore::new();
        let c0 = CommitObject::builder(ObjectId::from_bytes([0; 32]), "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(ObjectId::from_bytes([1; 32]), "test", "test", "c1")
            .parents(vec![id0])
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let c2 = CommitObject::builder(ObjectId::from_bytes([2; 32]), "test", "test", "c2")
            .parents(vec![id0])
            .timestamp(300)
            .build();
        let id2 = store.put(&Object::Commit(c2))?;
        let c3 = CommitObject::builder(ObjectId::from_bytes([3; 32]), "test", "test", "c3")
            .parents(vec![id1, id2])
            .timestamp(400)
            .build();
        let id3 = store.put(&Object::Commit(c3))?;
        let c4 = CommitObject::builder(ObjectId::from_bytes([4; 32]), "test", "test", "c4")
            .parents(vec![id2, id1])
            .timestamp(500)
            .build();
        let id4 = store.put(&Object::Commit(c4))?;
        Ok((store, vec![id0, id1, id2, id3, id4]))
    }

    #[test]
    fn merge_base_criss_cross() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_criss_cross_history()?;
        let result = merge_base(&store, ids[3], ids[4])?.ok_or("expected Some")?;
        assert!(
            result == ids[1] || result == ids[2],
            "LCA should be c1 or c2, got {result:?}",
        );
        assert_ne!(
            result, ids[0],
            "should not return c0 (dominated by c1 and c2)"
        );
        Ok(())
    }

    use panproto_gat::Name;
    use panproto_schema::{Schema, Vertex};

    /// Builds a minimal schema with the given vertex ids and every other
    /// collection left empty.
    fn make_test_schema(vertex_ids: &[&str]) -> Schema {
        let mut vertices = HashMap::new();
        for &id in vertex_ids {
            vertices.insert(
                Name::from(id),
                Vertex {
                    id: Name::from(id),
                    kind: Name::from("object"),
                    nsid: None,
                },
            );
        }
        Schema {
            protocol: "test".into(),
            vertices,
            edges: HashMap::new(),
            hyper_edges: HashMap::new(),
            constraints: HashMap::new(),
            required: HashMap::new(),
            nsids: HashMap::new(),
            entries: Vec::new(),
            variants: HashMap::new(),
            orderings: HashMap::new(),
            recursion_points: HashMap::new(),
            spans: HashMap::new(),
            usage_modes: HashMap::new(),
            nominal: HashMap::new(),
            coercions: HashMap::new(),
            mergers: HashMap::new(),
            defaults: HashMap::new(),
            policies: HashMap::new(),
            outgoing: HashMap::new(),
            incoming: HashMap::new(),
            between: HashMap::new(),
        }
    }

    /// Three commits c0 -> c1 -> c2 with well-formed migrations attached:
    /// s0 {a,b} -> s1 {a,b,c} -> s2 {a,c,d}.
    fn build_migration_history() -> Result<(MemStore, Vec<ObjectId>), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        let s0 = make_test_schema(&["a", "b"]);
        let s1 = make_test_schema(&["a", "b", "c"]);
        let s2 = make_test_schema(&["a", "c", "d"]);
        let s0_id = crate::tree::store_schema_as_tree(&mut store, s0)?;
        let s1_id = crate::tree::store_schema_as_tree(&mut store, s1)?;
        let s2_id = crate::tree::store_schema_as_tree(&mut store, s2)?;
        let mig01 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("b"), Name::from("b")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig01_id = store.put(&Object::Migration {
            src: s0_id,
            tgt: s1_id,
            mapping: mig01,
        })?;
        let mig12 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("c"), Name::from("c")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig12_id = store.put(&Object::Migration {
            src: s1_id,
            tgt: s2_id,
            mapping: mig12,
        })?;
        let c0 = CommitObject::builder(s0_id, "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(s1_id, "test", "test", "c1")
            .parents(vec![id0])
            .migration_id(mig01_id)
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let c2 = CommitObject::builder(s2_id, "test", "test", "c2")
            .parents(vec![id1])
            .migration_id(mig12_id)
            .timestamp(300)
            .build();
        let id2 = store.put(&Object::Commit(c2))?;
        Ok((store, vec![id0, id1, id2]))
    }

    #[test]
    fn coherence_short_path_no_warnings() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_migration_history()?;
        let result = compose_path_with_coherence(&store, &ids[0..1])?;
        assert!(result.coherence_warnings.is_empty());
        Ok(())
    }

    #[test]
    fn coherence_two_commit_path() -> Result<(), Box<dyn std::error::Error>> {
        let (store, ids) = build_migration_history()?;
        let result = compose_path_with_coherence(&store, &ids[0..2])?;
        assert_eq!(result.migration.vertex_map.len(), 2);
        assert!(result.coherence_warnings.is_empty());
        Ok(())
    }

    #[test]
    fn coherence_three_commit_path_no_structural_issues() -> Result<(), Box<dyn std::error::Error>>
    {
        let (store, ids) = build_migration_history()?;
        let result = compose_path_with_coherence(&store, &ids[0..3])?;
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("a")),
            Some(&Name::from("a")),
        );
        assert!(
            !result
                .coherence_warnings
                .iter()
                .any(|w| w.contains("does not exist in target schema")),
            "expected no structural coherence warnings"
        );
        Ok(())
    }

    #[test]
    fn coherence_detects_vertex_not_in_target() -> Result<(), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        let s_src = make_test_schema(&["a"]);
        let s_tgt = make_test_schema(&["a"]);
        let s_src_id = crate::tree::store_schema_as_tree(&mut store, s_src)?;
        let s_tgt_id = crate::tree::store_schema_as_tree(&mut store, s_tgt)?;
        // Migration deliberately targets a vertex the target schema lacks.
        let bad_mig = Migration {
            vertex_map: HashMap::from([(Name::from("a"), Name::from("nonexistent"))]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let bad_mig_id = store.put(&Object::Migration {
            src: s_src_id,
            tgt: s_tgt_id,
            mapping: bad_mig,
        })?;
        let c0 = CommitObject::builder(s_src_id, "test", "test", "src")
            .timestamp(100)
            .build();
        let csrc_id = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(s_tgt_id, "test", "test", "tgt")
            .parents(vec![csrc_id])
            .migration_id(bad_mig_id)
            .timestamp(200)
            .build();
        let ctgt_id = store.put(&Object::Commit(c1))?;
        let result = compose_path_with_coherence(&store, &[csrc_id, ctgt_id])?;
        let has_missing_vertex_warning = result
            .coherence_warnings
            .iter()
            .any(|w| w.contains("does not exist in target schema"));
        assert!(
            has_missing_vertex_warning,
            "expected warning about vertex not in target schema, got: {:?}",
            result.coherence_warnings,
        );
        Ok(())
    }

    #[test]
    fn coherence_detects_unmapped_source_vertex() -> Result<(), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        // Source has "b" but the migration only maps "a".
        let s_src = make_test_schema(&["a", "b"]);
        let s_tgt = make_test_schema(&["a"]);
        let s_src_id = crate::tree::store_schema_as_tree(&mut store, s_src)?;
        let s_tgt_id = crate::tree::store_schema_as_tree(&mut store, s_tgt)?;
        let mig = Migration {
            vertex_map: HashMap::from([(Name::from("a"), Name::from("a"))]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig_id = store.put(&Object::Migration {
            src: s_src_id,
            tgt: s_tgt_id,
            mapping: mig,
        })?;
        let c0 = CommitObject::builder(s_src_id, "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(s_tgt_id, "test", "test", "c1")
            .parents(vec![id0])
            .migration_id(mig_id)
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let result = compose_path_with_coherence(&store, &[id0, id1])?;
        let has_unmapped_warning = result
            .coherence_warnings
            .iter()
            .any(|w| w.contains("does not map source vertex"));
        assert!(
            has_unmapped_warning,
            "expected warning about unmapped source vertex, got: {:?}",
            result.coherence_warnings,
        );
        Ok(())
    }

    #[test]
    fn coherence_composition_drift_detected() -> Result<(), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        // Round trip: b -> c -> b; endpoints share the same schema.
        let s0 = make_test_schema(&["a", "b"]);
        let s1 = make_test_schema(&["a", "c"]);
        let s2 = make_test_schema(&["a", "b"]);
        let s0_id = crate::tree::store_schema_as_tree(&mut store, s0)?;
        let s1_id = crate::tree::store_schema_as_tree(&mut store, s1)?;
        let s2_id = crate::tree::store_schema_as_tree(&mut store, s2)?;
        let mig01 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("b"), Name::from("c")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig01_id = store.put(&Object::Migration {
            src: s0_id,
            tgt: s1_id,
            mapping: mig01,
        })?;
        let mig12 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("c"), Name::from("b")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig12_id = store.put(&Object::Migration {
            src: s1_id,
            tgt: s2_id,
            mapping: mig12,
        })?;
        let c0 = CommitObject::builder(s0_id, "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(s1_id, "test", "test", "c1")
            .parents(vec![id0])
            .migration_id(mig01_id)
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let c2 = CommitObject::builder(s2_id, "test", "test", "c2")
            .parents(vec![id1])
            .migration_id(mig12_id)
            .timestamp(300)
            .build();
        let id2 = store.put(&Object::Commit(c2))?;
        let result = compose_path_with_coherence(&store, &[id0, id1, id2])?;
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("a")),
            Some(&Name::from("a")),
        );
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("b")),
            Some(&Name::from("b")),
        );
        assert!(
            !result
                .coherence_warnings
                .iter()
                .any(|w| w.contains("does not exist")),
            "unexpected structural warnings: {:?}",
            result.coherence_warnings,
        );
        Ok(())
    }

    /// Round trip whose composition permutes b/c/d (b->p->c, c->q->d, d->r->b)
    /// rather than mapping them identically — guaranteed drift from the
    /// directly-derived migration.
    fn build_cyclic_drift_path() -> Result<(MemStore, [ObjectId; 3]), Box<dyn std::error::Error>> {
        let mut store = MemStore::new();
        let s0 = make_test_schema(&["a", "b", "c", "d"]);
        let s1 = make_test_schema(&["a", "p", "q", "r"]);
        let s2 = make_test_schema(&["a", "b", "c", "d"]);
        let s0_id = crate::tree::store_schema_as_tree(&mut store, s0)?;
        let s1_id = crate::tree::store_schema_as_tree(&mut store, s1)?;
        let s2_id = crate::tree::store_schema_as_tree(&mut store, s2)?;
        let mig01 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("b"), Name::from("p")),
                (Name::from("c"), Name::from("q")),
                (Name::from("d"), Name::from("r")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig01_id = store.put(&Object::Migration {
            src: s0_id,
            tgt: s1_id,
            mapping: mig01,
        })?;
        let mig12 = Migration {
            vertex_map: HashMap::from([
                (Name::from("a"), Name::from("a")),
                (Name::from("p"), Name::from("c")),
                (Name::from("q"), Name::from("d")),
                (Name::from("r"), Name::from("b")),
            ]),
            edge_map: HashMap::new(),
            hyper_edge_map: HashMap::new(),
            label_map: HashMap::new(),
            resolver: HashMap::new(),
            hyper_resolver: HashMap::new(),
            expr_resolvers: HashMap::new(),
        };
        let mig12_id = store.put(&Object::Migration {
            src: s1_id,
            tgt: s2_id,
            mapping: mig12,
        })?;
        let c0 = CommitObject::builder(s0_id, "test", "test", "c0")
            .timestamp(100)
            .build();
        let id0 = store.put(&Object::Commit(c0))?;
        let c1 = CommitObject::builder(s1_id, "test", "test", "c1")
            .parents(vec![id0])
            .migration_id(mig01_id)
            .timestamp(200)
            .build();
        let id1 = store.put(&Object::Commit(c1))?;
        let c2 = CommitObject::builder(s2_id, "test", "test", "c2")
            .parents(vec![id1])
            .migration_id(mig12_id)
            .timestamp(300)
            .build();
        let id2 = store.put(&Object::Commit(c2))?;
        Ok((store, [id0, id1, id2]))
    }

    #[test]
    fn coherence_detects_actual_composition_drift() -> Result<(), Box<dyn std::error::Error>> {
        let (store, [id0, id1, id2]) = build_cyclic_drift_path()?;
        let result = compose_path_with_coherence(&store, &[id0, id1, id2])?;
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("a")),
            Some(&Name::from("a"))
        );
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("b")),
            Some(&Name::from("c"))
        );
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("c")),
            Some(&Name::from("d"))
        );
        assert_eq!(
            result.migration.vertex_map.get(&Name::from("d")),
            Some(&Name::from("b"))
        );
        let has_drift = result
            .coherence_warnings
            .iter()
            .any(|w| w.contains("composition drift"));
        assert!(
            has_drift,
            "expected composition drift warnings, got: {:?}",
            result.coherence_warnings,
        );
        Ok(())
    }

    #[test]
    fn composition_result_must_use() {
        let r = CompositionResult::default();
        assert!(r.coherence_warnings.is_empty());
        assert!(r.migration.vertex_map.is_empty());
    }
}