use std::path::Path;
use itertools::Itertools as _;
use jj_lib::repo::Repo as _;
use jj_lib::repo::StoreFactories;
use jj_lib::workspace::Workspace;
use jj_lib::workspace::default_working_copy_factories;
use pollster::FutureExt as _;
use test_case::test_case;
use testutils::TestRepoBackend;
use testutils::TestResult;
use testutils::TestWorkspace;
use testutils::write_random_commit;
use testutils::write_random_commit_with_parents;
/// Recursively copies the tree rooted at `src` into `dst`.
///
/// `dst` is created if it does not already exist; files already present in
/// `dst` are overwritten, and files present only in `dst` are left alone.
fn copy_directory(src: &Path, dst: &Path) {
    // The destination may already exist (e.g. when restoring into a live
    // directory), so the create error is deliberately ignored.
    std::fs::create_dir(dst).ok();
    let children = std::fs::read_dir(src).unwrap().map(|entry| entry.unwrap().path());
    for source_child in children {
        let target_child = dst.join(source_child.file_name().unwrap());
        if source_child.is_dir() {
            copy_directory(&source_child, &target_child);
        } else {
            std::fs::copy(&source_child, &target_child).unwrap();
        }
    }
}
/// Three-way merge of directory trees, merging `left` and `right` relative to
/// the common ancestor `base`, writing the result into a new `output`
/// directory. Used to simulate a file-syncing service combining concurrent
/// edits of a repo directory without any locking.
///
/// The merge is file-level and assumes the two sides did not edit the same
/// file in conflicting ways:
/// - every file present in `left` is copied to `output`,
/// - files that `right` deleted relative to `base` are removed again,
/// - files that `right` added relative to `base` are copied in (if the same
///   name was also added on the left, the right-side copy wins),
/// - subdirectories seen on any side are merged recursively.
fn merge_directories(left: &Path, base: &Path, right: &Path, output: &Path) {
    std::fs::create_dir(output).unwrap();
    // Names of subdirectories seen on any of the three sides; they are
    // deduplicated and merged recursively at the end.
    let mut sub_dirs = vec![];
    if left.exists() {
        for entry in std::fs::read_dir(left).unwrap() {
            let child_left = entry.unwrap().path();
            let base_name = child_left.file_name().unwrap();
            let child_output = output.join(base_name);
            if child_left.is_dir() {
                sub_dirs.push(base_name.to_os_string());
            } else {
                std::fs::copy(&child_left, child_output).unwrap();
            }
        }
    }
    if base.exists() {
        for entry in std::fs::read_dir(base).unwrap() {
            let child_base = entry.unwrap().path();
            let base_name = child_base.file_name().unwrap();
            let child_right = right.join(base_name);
            let child_output = output.join(base_name);
            if child_base.is_dir() {
                sub_dirs.push(base_name.to_os_string());
            } else if !child_right.exists() {
                // Deleted on the right side: drop it from the output. The
                // removal may fail harmlessly if the file was also absent on
                // the left and thus never copied.
                std::fs::remove_file(child_output).ok();
            }
        }
    }
    if right.exists() {
        for entry in std::fs::read_dir(right).unwrap() {
            let child_right = entry.unwrap().path();
            let base_name = child_right.file_name().unwrap();
            let child_base = base.join(base_name);
            let child_output = output.join(base_name);
            if child_right.is_dir() {
                sub_dirs.push(base_name.to_os_string());
            } else if !child_base.exists() {
                // Added on the right side: copy it in.
                std::fs::copy(&child_right, child_output).unwrap();
            }
        }
    }
    // In-place std sort + dedup replaces the itertools `sorted().dedup()`
    // chain: each subdirectory is visited exactly once, in a deterministic
    // order, without needing a third-party crate.
    sub_dirs.sort_unstable();
    sub_dirs.dedup();
    for base_name in &sub_dirs {
        merge_directories(
            &left.join(base_name),
            &base.join(base_name),
            &right.join(base_name),
            &output.join(base_name),
        );
    }
}
#[test_case(TestRepoBackend::Simple; "simple backend")]
#[test_case(TestRepoBackend::Git; "git backend")]
fn test_bad_locking_children(backend: TestRepoBackend) -> TestResult {
    // Simulates a repo shared between two machines by a file-syncing service
    // that copies files with no locking: each "machine" is a plain file copy
    // of the workspace, both commit their own child of the same initial
    // commit, and the two trees are then merged back file by file. Loading
    // the merged repo must reconcile the divergent operations rather than
    // lose one side's work.
    let settings = testutils::user_settings();
    let test_workspace = TestWorkspace::init_with_backend_and_settings(backend, &settings);
    let repo = &test_workspace.repo;
    let workspace_root = test_workspace.workspace.workspace_root();
    // Create the commit both machines will build on, before any copies exist.
    let mut tx = repo.start_transaction();
    let initial = write_random_commit(tx.repo_mut());
    tx.commit("test").block_on()?;
    // "Machine 1": copy the workspace and commit a child of `initial` there.
    let machine1_root = test_workspace.root_dir().join("machine1");
    copy_directory(workspace_root, &machine1_root);
    let machine1_workspace = Workspace::load(
        &settings,
        &machine1_root,
        &StoreFactories::default(),
        &default_working_copy_factories(),
    )?;
    let machine1_repo = machine1_workspace.repo_loader().load_at_head().block_on()?;
    let mut machine1_tx = machine1_repo.start_transaction();
    let child1 = write_random_commit_with_parents(machine1_tx.repo_mut(), &[&initial]);
    machine1_tx.commit("test").block_on()?;
    // "Machine 2": independently do the same from the original workspace, so
    // its operation log diverges from machine 1's.
    let machine2_root = test_workspace.root_dir().join("machine2");
    copy_directory(workspace_root, &machine2_root);
    let machine2_workspace = Workspace::load(
        &settings,
        &machine2_root,
        &StoreFactories::default(),
        &default_working_copy_factories(),
    )?;
    let machine2_repo = machine2_workspace.repo_loader().load_at_head().block_on()?;
    let mut machine2_tx = machine2_repo.start_transaction();
    let child2 = write_random_commit_with_parents(machine2_tx.repo_mut(), &[&initial]);
    machine2_tx.commit("test").block_on()?;
    // Simulate the sync service combining the two machines' directories,
    // three-way, with the original workspace as the base.
    let merged_path = test_workspace.root_dir().join("merged");
    merge_directories(&machine1_root, workspace_root, &machine2_root, &merged_path);
    let merged_workspace = Workspace::load(
        &settings,
        &merged_path,
        &StoreFactories::default(),
        &default_working_copy_factories(),
    )?;
    let merged_repo = merged_workspace.repo_loader().load_at_head().block_on()?;
    // Both machines' commits must have survived the merge as visible heads...
    assert!(merged_repo.view().heads().contains(child1.id()));
    assert!(merged_repo.view().heads().contains(child2.id()));
    // ...and the loaded repo should sit on a new operation that merges the
    // two divergent operation heads (hence exactly two parents).
    let op_id = merged_repo.op_id().clone();
    let op = merged_repo.op_store().read_operation(&op_id).block_on()?;
    assert_eq!(op.parents.len(), 2);
    Ok(())
}
#[test_case(TestRepoBackend::Simple ; "simple backend")]
#[test_case(TestRepoBackend::Git ; "git backend")]
fn test_bad_locking_interrupted(backend: TestRepoBackend) -> TestResult {
    // Simulates a commit that was interrupted after the new operation was
    // written but before the old operation head was cleaned up: restoring a
    // backup of the `op_heads` directory re-adds the old head file next to
    // the new one (copy_directory() copies but never deletes), leaving two
    // op heads where one is an ancestor of the other.
    let settings = testutils::user_settings();
    let test_workspace = TestWorkspace::init_with_backend_and_settings(backend, &settings);
    let test_env = &test_workspace.env;
    let repo = &test_workspace.repo;
    let mut tx = repo.start_transaction();
    let initial = write_random_commit(tx.repo_mut());
    let repo = tx.commit("test").block_on()?;
    // Snapshot the op-heads state before the next commit...
    let op_heads_dir = test_workspace.repo_path().join("op_heads");
    let backup_path = test_workspace.root_dir().join("backup");
    copy_directory(&op_heads_dir, &backup_path);
    let mut tx = repo.start_transaction();
    write_random_commit_with_parents(tx.repo_mut(), &[&initial]);
    let op_id = tx.commit("test").block_on()?.operation().id().clone();
    // ...then restore it, re-creating the stale head alongside the new one.
    copy_directory(&backup_path, &op_heads_dir);
    // Loading must resolve the two heads to the newest operation.
    let reloaded_repo = test_env.load_repo_at_head(&settings, test_workspace.repo_path());
    assert_eq!(reloaded_repo.op_id(), &op_id);
    // Load a second time to check the resolution is stable across reloads
    // (the stale head must not keep resurfacing).
    let reloaded_repo = test_env.load_repo_at_head(&settings, test_workspace.repo_path());
    assert_eq!(reloaded_repo.op_id(), &op_id);
    Ok(())
}