#![expect(missing_docs)]
use std::collections::HashMap;
use std::collections::HashSet;
use std::slice;
use std::sync::Arc;
use futures::StreamExt as _;
use futures::TryStreamExt as _;
use futures::future::try_join_all;
use futures::try_join;
use indexmap::IndexMap;
use indexmap::IndexSet;
use itertools::Itertools as _;
use tracing::instrument;
use crate::backend::BackendError;
use crate::backend::BackendResult;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::commit::CommitIteratorExt as _;
use crate::commit::conflict_label_for_commits;
use crate::commit_builder::CommitBuilder;
use crate::conflict_labels::ConflictLabels;
use crate::index::Index;
use crate::index::IndexResult;
use crate::index::ResolvedChangeTargets;
use crate::iter_util::fallible_any;
use crate::matchers::FilesMatcher;
use crate::matchers::Matcher;
use crate::matchers::Visit;
use crate::merge::Diff;
use crate::merge::Merge;
use crate::merged_tree::MergedTree;
use crate::merged_tree_builder::MergedTreeBuilder;
use crate::repo::MutableRepo;
use crate::repo::Repo;
use crate::repo_path::RepoPath;
use crate::revset::RevsetExpression;
use crate::revset::RevsetStreamExt as _;
use crate::store::Store;
/// Merges the trees of `commits`, resolving whatever conflicts can be
/// resolved automatically.
#[instrument(skip(repo))]
pub async fn merge_commit_trees(repo: &dyn Repo, commits: &[Commit]) -> BackendResult<MergedTree> {
    match commits {
        // Fast path: a single commit's tree needs no merging.
        [commit] => Ok(commit.tree()),
        _ => {
            let unresolved =
                merge_commit_trees_no_resolve_without_repo(repo.store(), repo.index(), commits)
                    .await?;
            unresolved.resolve().await
        }
    }
}
#[instrument(skip(index))]
pub async fn merge_commit_trees_no_resolve_without_repo(
store: &Arc<Store>,
index: &dyn Index,
commits: &[Commit],
) -> BackendResult<MergedTree> {
let commit_ids = commits
.iter()
.map(|commit| commit.id().clone())
.collect_vec();
let commit_id_merge = find_recursive_merge_commits(store, index, commit_ids)?;
let tree_merge: Merge<(MergedTree, String)> = commit_id_merge
.try_map_async(async |commit_id| {
let commit = store.get_commit_async(commit_id).await?;
Ok::<_, BackendError>((commit.tree(), commit.conflict_label()))
})
.await?;
Ok(MergedTree::merge_no_resolve(tree_merge))
}
pub fn find_recursive_merge_commits(
store: &Arc<Store>,
index: &dyn Index,
mut commit_ids: Vec<CommitId>,
) -> BackendResult<Merge<CommitId>> {
if commit_ids.is_empty() {
Ok(Merge::resolved(store.root_commit_id().clone()))
} else if commit_ids.len() == 1 {
Ok(Merge::resolved(commit_ids.pop().unwrap()))
} else {
let mut result = Merge::resolved(commit_ids[0].clone());
for (i, other_commit_id) in commit_ids.iter().enumerate().skip(1) {
let ancestor_ids = index
.common_ancestors(&commit_ids[0..i], &commit_ids[i..][..1])
.map_err(|err| BackendError::Other(err.into()))?;
let ancestor_merge = find_recursive_merge_commits(store, index, ancestor_ids)?;
result = Merge::from_vec(vec![
result,
ancestor_merge,
Merge::resolved(other_commit_id.clone()),
])
.flatten();
}
Ok(result)
}
}
/// Restores the paths matched by `matcher` from `source` into `destination`,
/// returning the resulting (possibly conflicted) tree. The labels are used to
/// annotate conflict sides for the user.
pub async fn restore_tree(
    source: &MergedTree,
    destination: &MergedTree,
    source_label: String,
    destination_label: String,
    matcher: &dyn Matcher,
) -> BackendResult<MergedTree> {
    // If the matcher matches everything, the result is simply the source tree.
    if matcher.visit(RepoPath::root()) == Visit::AllRecursively {
        return Ok(source.clone());
    }
    // Only paths that differ between the two trees need to be restored.
    // (Idiomatic stream collection instead of a manual `while let` push loop.)
    let paths: Vec<_> = source
        .diff_stream(destination, matcher)
        .map(|entry| entry.path)
        .collect()
        .await;
    let matcher = FilesMatcher::new(paths);
    // Builds a tree containing only the entries of `tree` matching the changed
    // paths, carrying the given conflict labels.
    let select_matching =
        async |tree: &MergedTree, labels: ConflictLabels| -> BackendResult<MergedTree> {
            let empty_tree_ids = Merge::repeated(
                tree.store().empty_tree_id().clone(),
                tree.tree_ids().num_sides(),
            );
            let labeled_empty_tree = MergedTree::new(tree.store().clone(), empty_tree_ids, labels);
            let mut builder = MergedTreeBuilder::new(labeled_empty_tree);
            for (path, value) in tree.entries_matching(&matcher) {
                builder.set_or_remove(path, value?);
            }
            builder.write_tree().await
        };
    const RESTORE_BASE_LABEL: &str = "base files for restore";
    // Relabel the destination's sides as the merge base, avoiding repeated
    // nesting of the base label across successive restores.
    let base_labels = ConflictLabels::from_merge(destination.labels().as_merge().map(|label| {
        if label.is_empty() || label.starts_with(RESTORE_BASE_LABEL) {
            label.clone()
        } else {
            format!("{RESTORE_BASE_LABEL} (from {label})")
        }
    }));
    // 3-way merge: destination, minus its matching entries, plus the source's
    // matching entries.
    MergedTree::merge(Merge::from_vec(vec![
        (
            destination.clone(),
            format!("{destination_label} (restore destination)"),
        ),
        (
            select_matching(destination, base_labels).await?,
            format!("{RESTORE_BASE_LABEL} (from {destination_label})"),
        ),
        (
            select_matching(source, source.labels().clone()).await?,
            format!("restored files (from {source_label})"),
        ),
    ]))
    .await
}
/// Rebases `old_commit` onto `new_parents` and writes the rewritten commit.
pub async fn rebase_commit(
    mut_repo: &mut MutableRepo,
    old_commit: Commit,
    new_parents: Vec<CommitId>,
) -> BackendResult<Commit> {
    CommitRewriter::new(mut_repo, old_commit, new_parents)
        .rebase()
        .await?
        .write()
        .await
}
/// Rewrites a single commit onto a new set of parents.
pub struct CommitRewriter<'repo> {
    /// The mutable repo the rewritten commit will be recorded in.
    mut_repo: &'repo mut MutableRepo,
    /// The commit being rewritten.
    old_commit: Commit,
    /// The intended parents of the rewritten commit.
    new_parents: Vec<CommitId>,
}
impl<'repo> CommitRewriter<'repo> {
    /// Creates a rewriter for `old_commit` with the given intended parents.
    pub fn new(
        mut_repo: &'repo mut MutableRepo,
        old_commit: Commit,
        new_parents: Vec<CommitId>,
    ) -> Self {
        Self {
            mut_repo,
            old_commit,
            new_parents,
        }
    }

    /// Returns the underlying mutable repo.
    pub fn repo_mut(&mut self) -> &mut MutableRepo {
        self.mut_repo
    }

    /// The commit being rewritten.
    pub fn old_commit(&self) -> &Commit {
        &self.old_commit
    }

    /// The intended new parents of the rewritten commit.
    pub fn new_parents(&self) -> &[CommitId] {
        &self.new_parents
    }

    /// Replaces the intended new parents.
    pub fn set_new_parents(&mut self, new_parents: Vec<CommitId>) {
        self.new_parents = new_parents;
    }

    /// Sets the intended new parents to the rewritten versions of
    /// `unrewritten_parents`, as recorded in the mutable repo.
    pub fn set_new_rewritten_parents(&mut self, unrewritten_parents: &[CommitId]) {
        self.new_parents = self.mut_repo.new_parents(unrewritten_parents);
    }

    /// Replaces any occurrence of `old_parent` in the intended new parents by
    /// `new_parents`, then removes duplicates (keeping first occurrences).
    pub fn replace_parent<'a>(
        &mut self,
        old_parent: &CommitId,
        new_parents: impl IntoIterator<Item = &'a CommitId>,
    ) {
        if let Some(i) = self.new_parents.iter().position(|p| p == old_parent) {
            self.new_parents
                .splice(i..i + 1, new_parents.into_iter().cloned());
            // Deduplicate while preserving order of first occurrence.
            let mut unique = HashSet::new();
            self.new_parents.retain(|p| unique.insert(p.clone()));
        }
    }

    /// Whether the intended new parents differ from the old commit's parents.
    pub fn parents_changed(&self) -> bool {
        self.new_parents != self.old_commit.parent_ids()
    }

    /// Drops intended parents that are ancestors of other intended parents,
    /// simplifying a redundant merge into a linear commit where possible.
    pub fn simplify_ancestor_merge(&mut self) -> IndexResult<()> {
        let head_set: HashSet<_> = self
            .mut_repo
            .index()
            .heads(&mut self.new_parents.iter())?
            .into_iter()
            .collect();
        self.new_parents.retain(|parent| head_set.contains(parent));
        Ok(())
    }

    /// Records the old commit as abandoned, with the intended new parents
    /// taking its place in descendants.
    pub fn abandon(self) {
        let old_commit_id = self.old_commit.id().clone();
        let new_parents = self.new_parents;
        self.mut_repo
            .record_abandoned_commit_with_parents(old_commit_id, new_parents);
    }

    /// Rebases the old commit onto the new parents. Returns `None` if the
    /// commit was abandoned per `empty`; otherwise returns a builder for the
    /// rewritten commit (not yet written).
    pub async fn rebase_with_empty_behavior(
        self,
        empty: EmptyBehavior,
    ) -> BackendResult<Option<CommitBuilder<'repo>>> {
        // Load old and new parent commits concurrently.
        let old_parents_fut = self.old_commit.parents();
        let new_parents_fut = try_join_all(
            self.new_parents
                .iter()
                .map(|new_parent_id| self.mut_repo.store().get_commit_async(new_parent_id)),
        );
        let (old_parents, new_parents) = try_join!(old_parents_fut, new_parents_fut)?;
        let old_parent_trees = old_parents
            .iter()
            .map(|parent| parent.tree_ids().clone())
            .collect_vec();
        let new_parent_trees = new_parents
            .iter()
            .map(|parent| parent.tree_ids().clone())
            .collect_vec();
        let (was_empty, new_tree) = if new_parent_trees == old_parent_trees {
            (
                // `was_empty` is only consulted for `AbandonNewlyEmpty` below;
                // with unchanged parent trees the commit cannot be *newly*
                // empty, and `true` makes `!was_empty` false so it is kept.
                true,
                // Optimization: the base trees are identical, so the merge
                // would reproduce the old commit's tree; skip merging.
                self.old_commit.tree(),
            )
        } else {
            let old_base_tree_fut = merge_commit_trees(self.mut_repo, &old_parents);
            let new_base_tree_fut = merge_commit_trees(self.mut_repo, &new_parents);
            let old_tree = self.old_commit.tree();
            let (old_base_tree, new_base_tree) = try_join!(old_base_tree_fut, new_base_tree_fut)?;
            (
                old_base_tree.tree_ids() == self.old_commit.tree_ids(),
                // 3-way merge: new base + (old tree - old base).
                MergedTree::merge(Merge::from_vec(vec![
                    (
                        new_base_tree,
                        format!(
                            "{} (rebase destination)",
                            conflict_label_for_commits(&new_parents)
                        ),
                    ),
                    (
                        old_base_tree,
                        format!(
                            "{} (parents of rebased revision)",
                            conflict_label_for_commits(&old_parents)
                        ),
                    ),
                    (
                        old_tree,
                        format!("{} (rebased revision)", self.old_commit.conflict_label()),
                    ),
                ]))
                .await?,
            )
        };
        // Abandoning for emptiness only applies to non-merge results.
        if let [parent] = &new_parents[..] {
            let should_abandon = match empty {
                EmptyBehavior::Keep => false,
                EmptyBehavior::AbandonNewlyEmpty => {
                    parent.tree_ids() == new_tree.tree_ids() && !was_empty
                }
                EmptyBehavior::AbandonAllEmpty => parent.tree_ids() == new_tree.tree_ids(),
            };
            if should_abandon {
                self.abandon();
                return Ok(None);
            }
        }
        let builder = self
            .mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
            .set_tree(new_tree);
        Ok(Some(builder))
    }

    /// Rebases the old commit onto the new parents, keeping it even if it
    /// becomes empty.
    pub async fn rebase(self) -> BackendResult<CommitBuilder<'repo>> {
        let builder = self.rebase_with_empty_behavior(EmptyBehavior::Keep).await?;
        // `Keep` never abandons, so the builder is always present.
        Ok(builder.unwrap())
    }

    /// Rewrites the commit with the new parents but keeps its old tree
    /// (i.e. the content diff against its parents changes).
    pub fn reparent(self) -> CommitBuilder<'repo> {
        self.mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
    }
}
/// The outcome of rebasing a single commit.
#[derive(Debug)]
pub enum RebasedCommit {
    /// The commit was rewritten as a new commit.
    Rewritten(Commit),
    /// The commit was abandoned (it would have been empty); `parent_id` is
    /// its single new parent, which takes its place.
    Abandoned { parent_id: CommitId },
}
/// Rebases the rewriter's commit according to `options`, writing the new
/// commit or recording an abandonment.
pub async fn rebase_commit_with_options(
    mut rewriter: CommitRewriter<'_>,
    options: &RebaseOptions,
) -> BackendResult<RebasedCommit> {
    if options.simplify_ancestor_merge {
        rewriter
            .simplify_ancestor_merge()
            .map_err(|err| BackendError::Other(err.into()))?;
    }
    // Capture the (possibly simplified) parent set before the rewriter is
    // consumed, in case the commit ends up abandoned.
    let parent_count = rewriter.new_parents.len();
    let sole_parent = if let [parent_id] = &rewriter.new_parents[..] {
        Some(parent_id.clone())
    } else {
        None
    };
    match rewriter.rebase_with_empty_behavior(options.empty).await? {
        Some(builder) => {
            let new_commit = builder.write().await?;
            Ok(RebasedCommit::Rewritten(new_commit))
        }
        None => {
            // A commit is only abandoned when it has exactly one new parent.
            assert_eq!(parent_count, 1);
            Ok(RebasedCommit::Abandoned {
                parent_id: sole_parent.unwrap(),
            })
        }
    }
}
/// Computes the tree that would result from rebasing `sources` onto the
/// parents of `destination`, without writing any commits.
pub async fn rebase_to_dest_parent(
    repo: &dyn Repo,
    sources: &[Commit],
    destination: &Commit,
) -> BackendResult<MergedTree> {
    // Fast path: a single source that already sits on the destination's
    // parents needs no rebasing.
    if let [source] = sources
        && source.parent_ids() == destination.parent_ids()
    {
        return Ok(source.tree());
    }
    // One labeled diff per source: from its parent tree to its own tree.
    let diffs: Vec<_> = try_join_all(sources.iter().map(async |source| -> BackendResult<_> {
        Ok(Diff::new(
            (
                source.parent_tree(repo).await?,
                format!(
                    "{} (original parents)",
                    source.parents_conflict_label().await?
                ),
            ),
            (
                source.tree(),
                format!("{} (original revision)", source.conflict_label()),
            ),
        ))
    }))
    .await?;
    // Apply all of the diffs onto the destination's parent tree.
    MergedTree::merge(Merge::from_diffs(
        (
            destination.parent_tree(repo).await?,
            format!(
                "{} (new parents)",
                destination.parents_conflict_label().await?
            ),
        ),
        diffs,
    ))
    .await
}
/// How empty commits are handled when rebasing.
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub enum EmptyBehavior {
    /// Always keep empty commits.
    #[default]
    Keep,
    /// Abandon commits that become empty as a result of the rebase, but keep
    /// commits that were already empty beforehand.
    AbandonNewlyEmpty,
    /// Abandon any commit that is empty after the rebase.
    AbandonAllEmpty,
}
/// Options controlling how commits are rebased.
#[derive(Clone, Debug, Default)]
pub struct RebaseOptions {
    /// How to handle commits that are (or become) empty.
    pub empty: EmptyBehavior,
    /// How refs pointing at rewritten/abandoned commits are handled.
    pub rewrite_refs: RewriteRefsOptions,
    /// Whether to drop merge parents that are ancestors of other parents.
    pub simplify_ancestor_merge: bool,
}
/// Options controlling how refs are updated when commits are rewritten.
#[derive(Clone, Debug, Default)]
pub struct RewriteRefsOptions {
    /// Whether bookmarks pointing to abandoned commits should be deleted
    /// (rather than being moved along with the abandonment).
    pub delete_abandoned_bookmarks: bool,
}
/// Statistics resulting from a `move_commits` operation.
#[derive(Debug)]
pub struct MoveCommitsStats {
    /// Number of commits in the target set that were rebased.
    pub num_rebased_targets: u32,
    /// Number of descendant (non-target) commits that were rebased.
    pub num_rebased_descendants: u32,
    /// Number of commits skipped because their parents did not change.
    pub num_skipped_rebases: u32,
    /// Number of commits abandoned because they were empty.
    pub num_abandoned_empty: u32,
    /// The rebase outcome for each rewritten commit, keyed by old commit id.
    pub rebased_commits: HashMap<CommitId, RebasedCommit>,
}
/// Describes where a set of commits should be moved in the graph.
#[derive(Clone, Debug)]
pub struct MoveCommitsLocation {
    /// The new parents of the moved commits.
    pub new_parent_ids: Vec<CommitId>,
    /// Commits that should become children of the moved commits.
    pub new_child_ids: Vec<CommitId>,
    /// The set of commits to move.
    pub target: MoveCommitsTarget,
}
/// Specifies which commits are moved by a `move_commits` operation.
#[derive(Clone, Debug)]
pub enum MoveCommitsTarget {
    /// An explicit set of commits to move.
    Commits(Vec<CommitId>),
    /// Roots of the set to move; all of their descendants are moved too.
    Roots(Vec<CommitId>),
}
/// The precomputed result of `compute_move_commits`, ready to be applied.
#[derive(Clone, Debug)]
pub struct ComputedMoveCommits {
    /// Ids of the commits being moved.
    target_commit_ids: IndexSet<CommitId>,
    /// Commits (targets and descendants) that may need to be rewritten.
    descendants: Vec<Commit>,
    /// The new parent ids computed for each affected commit.
    commit_new_parents_map: HashMap<CommitId, Vec<CommitId>>,
    /// Commits to abandon rather than rebase when applying.
    to_abandon: HashSet<CommitId>,
}
impl ComputedMoveCommits {
    /// Returns a no-op result with nothing to rebase or abandon.
    fn empty() -> Self {
        Self {
            target_commit_ids: IndexSet::new(),
            descendants: Vec::new(),
            commit_new_parents_map: HashMap::new(),
            to_abandon: HashSet::new(),
        }
    }

    /// Marks the given commits to be abandoned instead of rebased when this
    /// computed move is applied.
    pub fn record_to_abandon(&mut self, commit_ids: impl IntoIterator<Item = CommitId>) {
        for commit_id in commit_ids {
            self.to_abandon.insert(commit_id);
        }
    }

    /// Applies the computed move to `mut_repo` with the given options.
    pub async fn apply(
        self,
        mut_repo: &mut MutableRepo,
        options: &RebaseOptions,
    ) -> BackendResult<MoveCommitsStats> {
        apply_move_commits(mut_repo, self, options).await
    }
}
/// Moves the commits in `loc.target` to the new location, rebasing affected
/// descendants accordingly.
pub async fn move_commits(
    mut_repo: &mut MutableRepo,
    loc: &MoveCommitsLocation,
    options: &RebaseOptions,
) -> BackendResult<MoveCommitsStats> {
    let computed = compute_move_commits(mut_repo, loc).await?;
    computed.apply(mut_repo, options).await
}
/// Computes the new parents of every commit affected by moving `loc.target`
/// to the new location, without rewriting anything yet.
pub async fn compute_move_commits(
    repo: &MutableRepo,
    loc: &MoveCommitsLocation,
) -> BackendResult<ComputedMoveCommits> {
    let target_commit_ids: IndexSet<CommitId>;
    let connected_target_commits: Vec<Commit>;
    let connected_target_commits_internal_parents: HashMap<CommitId, IndexSet<CommitId>>;
    let target_roots: HashSet<CommitId>;
    match &loc.target {
        MoveCommitsTarget::Commits(commit_ids) => {
            if commit_ids.is_empty() {
                return Ok(ComputedMoveCommits::empty());
            }
            target_commit_ids = commit_ids.iter().cloned().collect();
            // The connected graph of the targets, including any non-target
            // commits lying between them, so parent chains can be traced
            // through the gaps.
            connected_target_commits = RevsetExpression::commits(commit_ids.clone())
                .connected()
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .stream()
                .commits(repo.store())
                .try_collect()
                .await
                .map_err(|err| err.into_backend_error())?;
            connected_target_commits_internal_parents =
                compute_internal_parents_within(&target_commit_ids, &connected_target_commits);
            // Targets with no ancestors inside the target set are the roots
            // of the moved subgraph.
            target_roots = connected_target_commits_internal_parents
                .iter()
                .filter(|&(commit_id, parents)| {
                    target_commit_ids.contains(commit_id) && parents.is_empty()
                })
                .map(|(commit_id, _)| commit_id.clone())
                .collect();
        }
        MoveCommitsTarget::Roots(root_ids) => {
            if root_ids.is_empty() {
                return Ok(ComputedMoveCommits::empty());
            }
            // The target set is the roots plus all of their descendants.
            target_commit_ids = RevsetExpression::commits(root_ids.clone())
                .descendants()
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .stream()
                .try_collect()
                .await
                .map_err(|err| err.into_backend_error())?;
            connected_target_commits = try_join_all(
                target_commit_ids
                    .iter()
                    .map(|id| repo.store().get_commit_async(id)),
            )
            .await?;
            // A descendant set is already connected, so no internal-parent
            // map is needed here.
            connected_target_commits_internal_parents = HashMap::new();
            target_roots = root_ids.iter().cloned().collect();
        }
    }
    // For each target commit, its parents as seen from outside the target
    // set: non-target parents are kept; target parents are replaced by their
    // own external parents, transitively (hence the reverse iteration so that
    // parents are processed before children).
    let mut target_commits_external_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
    for id in target_commit_ids.iter().rev() {
        let commit = repo.store().get_commit_async(id).await?;
        let mut new_parents = IndexSet::new();
        for old_parent in commit.parent_ids() {
            if let Some(parents) = target_commits_external_parents.get(old_parent) {
                new_parents.extend(parents.iter().cloned());
            } else {
                new_parents.insert(old_parent.clone());
            }
        }
        target_commits_external_parents.insert(commit.id().clone(), new_parents);
    }
    // If a requested new parent is itself a target (and will therefore move),
    // substitute its external parents.
    let new_parent_ids: Vec<_> = loc
        .new_parent_ids
        .iter()
        .flat_map(|parent_id| {
            if let Some(parent_ids) = target_commits_external_parents.get(parent_id) {
                parent_ids.iter().cloned().collect_vec()
            } else {
                vec![parent_id.clone()]
            }
        })
        .collect();
    // Resolve the requested new children. A new child that is itself a target
    // is replaced by its closest descendants outside the target set.
    let new_children: Vec<_> = if loc
        .new_child_ids
        .iter()
        .any(|id| target_commit_ids.contains(id))
    {
        // The targets plus their immediate children.
        let target_commits_descendants: Vec<_> =
            RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
                .union(
                    &RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
                        .children(),
                )
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .stream()
                .commits(repo.store())
                .try_collect()
                .await
                .map_err(|err| err.into_backend_error())?;
        // Maps each commit to its closest non-target descendants; a
        // non-target commit maps to itself.
        let mut target_commit_external_descendants: HashMap<CommitId, IndexSet<Commit>> =
            HashMap::new();
        for commit in &target_commits_descendants {
            if !target_commit_external_descendants.contains_key(commit.id()) {
                let children = if target_commit_ids.contains(commit.id()) {
                    IndexSet::new()
                } else {
                    IndexSet::from([commit.clone()])
                };
                target_commit_external_descendants.insert(commit.id().clone(), children);
            }
            // Propagate this commit's external descendants up to its target
            // parents.
            let children = target_commit_external_descendants
                .get(commit.id())
                .unwrap()
                .iter()
                .cloned()
                .collect_vec();
            for parent_id in commit.parent_ids() {
                if target_commit_ids.contains(parent_id) {
                    if let Some(target_children) =
                        target_commit_external_descendants.get_mut(parent_id)
                    {
                        target_children.extend(children.iter().cloned());
                    } else {
                        target_commit_external_descendants
                            .insert(parent_id.clone(), children.iter().cloned().collect());
                    }
                }
            }
        }
        let mut new_children = Vec::new();
        for id in &loc.new_child_ids {
            if let Some(children) = target_commit_external_descendants.get(id) {
                new_children.extend(children.iter().cloned());
            } else {
                new_children.push(repo.store().get_commit_async(id).await?);
            }
        }
        new_children
    } else {
        try_join_all(
            loc.new_child_ids
                .iter()
                .map(|id| repo.store().get_commit_async(id)),
        )
        .await?
    };
    // Compute the parents each new child should be rebased onto, which
    // include the heads of the moved target set.
    let new_children_parents: HashMap<_, _> = if !new_children.is_empty() {
        let target_heads = compute_commits_heads(&target_commit_ids, &connected_target_commits);
        new_children
            .iter()
            .map(|child_commit| {
                let mut new_child_parent_ids = IndexSet::new();
                for old_child_parent_id in child_commit.parent_ids() {
                    // A parent that is itself a moving target is substituted
                    // by its external parents.
                    let old_child_parent_ids = if let Some(parents) =
                        target_commits_external_parents.get(old_child_parent_id)
                    {
                        parents.iter().collect_vec()
                    } else {
                        vec![old_child_parent_id]
                    };
                    // A parent that will become a parent of the moved targets
                    // is replaced by the target heads (avoiding a redundant
                    // ancestor edge).
                    for id in old_child_parent_ids {
                        if new_parent_ids.contains(id) {
                            new_child_parent_ids.extend(target_heads.clone());
                        } else {
                            new_child_parent_ids.insert(id.clone());
                        }
                    }
                }
                // The child always becomes a child of the target heads.
                new_child_parent_ids.extend(target_heads.clone());
                (
                    child_commit.id().clone(),
                    new_child_parent_ids.into_iter().collect_vec(),
                )
            })
            .collect()
    } else {
        HashMap::new()
    };
    // Everything descending from the target roots or the new children may
    // need to be rewritten.
    let mut roots = target_roots.iter().cloned().collect_vec();
    roots.extend(new_children.iter().ids().cloned());
    let descendants = repo.find_descendants_for_rebase(roots.clone()).await?;
    // Decide the new parents for every affected commit.
    let commit_new_parents_map = descendants
        .iter()
        .map(|commit| -> BackendResult<_> {
            let commit_id = commit.id();
            let new_parent_ids =
                if let Some(new_child_parents) = new_children_parents.get(commit_id) {
                    new_child_parents.clone()
                } else if target_commit_ids.contains(commit_id) {
                    // Target roots move onto the requested new parents; other
                    // targets keep their position within the moved subgraph.
                    if target_roots.contains(commit_id) {
                        new_parent_ids.clone()
                    } else {
                        let mut new_parents = vec![];
                        for parent_id in commit.parent_ids() {
                            if target_commit_ids.contains(parent_id) {
                                new_parents.push(parent_id.clone());
                            } else if let Some(parents) =
                                connected_target_commits_internal_parents.get(parent_id)
                            {
                                // A non-target gap commit: connect to the
                                // closest target ancestors instead.
                                new_parents.extend(parents.iter().cloned());
                            } else if !fallible_any(&new_children, |child| {
                                repo.index().is_ancestor(child.id(), parent_id)
                            })
                            .map_err(|err| BackendError::Other(err.into()))?
                            {
                                // Keep the external parent unless it descends
                                // from one of the new children, which would
                                // create a cycle after the move.
                                new_parents.push(parent_id.clone());
                            }
                        }
                        new_parents
                    }
                } else if commit
                    .parent_ids()
                    .iter()
                    .any(|id| target_commits_external_parents.contains_key(id))
                {
                    // A non-target descendant whose parent moves away:
                    // reparent it onto the moved parent's external parents.
                    let mut new_parents = vec![];
                    for parent in commit.parent_ids() {
                        if let Some(parents) = target_commits_external_parents.get(parent) {
                            new_parents.extend(parents.iter().cloned());
                        } else {
                            new_parents.push(parent.clone());
                        }
                    }
                    new_parents
                } else {
                    commit.parent_ids().iter().cloned().collect_vec()
                };
            Ok((commit.id().clone(), new_parent_ids))
        })
        .try_collect()?;
    Ok(ComputedMoveCommits {
        target_commit_ids,
        descendants,
        commit_new_parents_map,
        to_abandon: HashSet::new(),
    })
}
/// Rewrites all affected commits according to the precomputed move,
/// collecting statistics about what happened to each commit.
async fn apply_move_commits(
    mut_repo: &mut MutableRepo,
    commits: ComputedMoveCommits,
    options: &RebaseOptions,
) -> BackendResult<MoveCommitsStats> {
    let mut num_rebased_targets = 0;
    let mut num_rebased_descendants = 0;
    let mut num_skipped_rebases = 0;
    let mut num_abandoned_empty = 0;
    // Descendants (non-targets) are always rebased with default empty-commit
    // handling and without merge simplification; `options` only applies to
    // the target commits themselves.
    let rebase_descendant_options = &RebaseOptions {
        empty: EmptyBehavior::Keep,
        rewrite_refs: options.rewrite_refs.clone(),
        simplify_ancestor_merge: false,
    };
    let mut rebased_commits: HashMap<CommitId, RebasedCommit> = HashMap::new();
    mut_repo
        .transform_commits(
            commits.descendants,
            &commits.commit_new_parents_map,
            &options.rewrite_refs,
            async |rewriter| {
                let old_commit_id = rewriter.old_commit().id().clone();
                if commits.to_abandon.contains(&old_commit_id) {
                    rewriter.abandon();
                } else if rewriter.parents_changed() {
                    let is_target_commit = commits.target_commit_ids.contains(&old_commit_id);
                    let rebased_commit = rebase_commit_with_options(
                        rewriter,
                        if is_target_commit {
                            options
                        } else {
                            rebase_descendant_options
                        },
                    )
                    .await?;
                    if let RebasedCommit::Abandoned { .. } = rebased_commit {
                        num_abandoned_empty += 1;
                    } else if is_target_commit {
                        num_rebased_targets += 1;
                    } else {
                        num_rebased_descendants += 1;
                    }
                    rebased_commits.insert(old_commit_id, rebased_commit);
                } else {
                    // Parents unchanged: nothing to rewrite for this commit.
                    num_skipped_rebases += 1;
                }
                Ok(())
            },
        )
        .await?;
    Ok(MoveCommitsStats {
        num_rebased_targets,
        num_rebased_descendants,
        num_skipped_rebases,
        num_abandoned_empty,
        rebased_commits,
    })
}
/// Statistics resulting from a commit duplication operation.
#[derive(Debug, Default)]
pub struct DuplicateCommitsStats {
    /// Maps each original commit id to its newly written duplicate.
    pub duplicated_commits: IndexMap<CommitId, Commit>,
    /// Number of descendant commits rebased onto the duplicates.
    pub num_rebased: u32,
}
/// Duplicates `target_commit_ids` onto `parent_commit_ids` and rebases
/// `children_commit_ids` onto the heads of the duplicated set. Descriptions
/// in `target_descriptions` override the originals' descriptions.
pub async fn duplicate_commits(
    mut_repo: &mut MutableRepo,
    target_commit_ids: &[CommitId],
    target_descriptions: &HashMap<CommitId, String>,
    parent_commit_ids: &[CommitId],
    children_commit_ids: &[CommitId],
) -> BackendResult<DuplicateCommitsStats> {
    if target_commit_ids.is_empty() {
        return Ok(DuplicateCommitsStats::default());
    }
    let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();
    let mut num_rebased = 0;
    let target_commit_ids: IndexSet<_> = target_commit_ids.iter().cloned().collect();
    // The connected graph of the targets, so parent chains can be traced
    // through non-target commits lying between them.
    let connected_target_commits: Vec<_> =
        RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
            .connected()
            .evaluate(mut_repo)
            .map_err(|err| err.into_backend_error())?
            .stream()
            .commits(mut_repo.store())
            .try_collect()
            .await
            .map_err(|err| err.into_backend_error())?;
    // For each target, its closest ancestors that are also targets.
    let target_commits_internal_parents = {
        let mut target_commits_internal_parents =
            compute_internal_parents_within(&target_commit_ids, &connected_target_commits);
        target_commits_internal_parents.retain(|id, _| target_commit_ids.contains(id));
        target_commits_internal_parents
    };
    // Targets with no internal parents are the roots of the duplicated
    // subgraph; they get `parent_commit_ids` as parents.
    let target_root_ids: HashSet<_> = target_commits_internal_parents
        .iter()
        .filter(|(_, parents)| parents.is_empty())
        .map(|(commit_id, _)| commit_id.clone())
        .collect();
    // Head ids are only needed if children will be rebased onto them.
    let target_head_ids = if !children_commit_ids.is_empty() {
        compute_commits_heads(&target_commit_ids, &connected_target_commits)
    } else {
        vec![]
    };
    // Duplicate ancestors before descendants so that a commit's duplicated
    // parents already exist when it is written.
    for original_commit_id in target_commit_ids.iter().rev() {
        let original_commit = mut_repo
            .store()
            .get_commit_async(original_commit_id)
            .await?;
        let new_parent_ids = if target_root_ids.contains(original_commit_id) {
            parent_commit_ids.to_vec()
        } else {
            // Map each internal parent to its already-written duplicate.
            target_commits_internal_parents
                .get(original_commit_id)
                .unwrap()
                .iter()
                .map(|id| {
                    duplicated_old_to_new
                        .get(id)
                        .map_or(id, |commit| commit.id())
                        .clone()
                })
                .collect()
        };
        let mut new_commit_builder = CommitRewriter::new(mut_repo, original_commit, new_parent_ids)
            .rebase()
            .await?
            .clear_rewrite_source()
            .generate_new_change_id();
        if let Some(desc) = target_descriptions.get(original_commit_id) {
            new_commit_builder = new_commit_builder.set_description(desc);
        }
        duplicated_old_to_new.insert(
            original_commit_id.clone(),
            new_commit_builder.write().await?,
        );
    }
    // Swap the original head ids for their duplicated counterparts.
    let target_head_ids = target_head_ids
        .into_iter()
        .map(|commit_id| {
            duplicated_old_to_new
                .get(&commit_id)
                .map_or(commit_id, |commit| commit.id().clone())
        })
        .collect_vec();
    // Rebase the requested children (and their descendants) onto the
    // duplicated heads.
    let children_commit_ids_set: HashSet<CommitId> = children_commit_ids.iter().cloned().collect();
    mut_repo
        .transform_descendants(children_commit_ids.to_vec(), async |mut rewriter| {
            if children_commit_ids_set.contains(rewriter.old_commit().id()) {
                let mut child_new_parent_ids = IndexSet::new();
                for old_parent_id in rewriter.old_commit().parent_ids() {
                    // A parent that is one of the duplication parents is
                    // replaced by the duplicated heads.
                    if parent_commit_ids.contains(old_parent_id) {
                        child_new_parent_ids.extend(target_head_ids.clone());
                    } else {
                        child_new_parent_ids.insert(old_parent_id.clone());
                    }
                }
                // Ensure the child descends from the duplicated heads even if
                // none of its parents matched above.
                child_new_parent_ids.extend(target_head_ids.clone());
                rewriter.set_new_parents(child_new_parent_ids.into_iter().collect());
            }
            num_rebased += 1;
            rewriter.rebase().await?.write().await?;
            Ok(())
        })
        .await?;
    Ok(DuplicateCommitsStats {
        duplicated_commits: duplicated_old_to_new,
        num_rebased,
    })
}
/// Duplicates each of `target_commits` in place, i.e. onto its own parents
/// (or their duplicates). No descendants are rebased.
pub async fn duplicate_commits_onto_parents(
    mut_repo: &mut MutableRepo,
    target_commits: &[CommitId],
    target_descriptions: &HashMap<CommitId, String>,
) -> BackendResult<DuplicateCommitsStats> {
    if target_commits.is_empty() {
        return Ok(DuplicateCommitsStats::default());
    }
    let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();
    // Visit ancestors before descendants so that a commit's duplicated
    // parents already exist when it is written.
    for original_commit_id in target_commits.iter().rev() {
        let original_commit = mut_repo
            .store()
            .get_commit_async(original_commit_id)
            .await?;
        // Substitute any parent that has already been duplicated with its copy.
        let new_parent_ids: Vec<CommitId> = original_commit
            .parent_ids()
            .iter()
            .map(|id| match duplicated_old_to_new.get(id) {
                Some(new_commit) => new_commit.id().clone(),
                None => id.clone(),
            })
            .collect();
        let mut builder = mut_repo
            .rewrite_commit(&original_commit)
            .clear_rewrite_source()
            .generate_new_change_id()
            .set_parents(new_parent_ids);
        if let Some(description) = target_descriptions.get(original_commit_id) {
            builder = builder.set_description(description);
        }
        duplicated_old_to_new.insert(original_commit_id.clone(), builder.write().await?);
    }
    Ok(DuplicateCommitsStats {
        duplicated_commits: duplicated_old_to_new,
        num_rebased: 0,
    })
}
fn compute_internal_parents_within(
target_commit_ids: &IndexSet<CommitId>,
graph_commits: &[Commit],
) -> HashMap<CommitId, IndexSet<CommitId>> {
let mut internal_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
for commit in graph_commits.iter().rev() {
let mut new_parents = IndexSet::new();
for old_parent in commit.parent_ids() {
if target_commit_ids.contains(old_parent) {
new_parents.insert(old_parent.clone());
} else if let Some(parents) = internal_parents.get(old_parent) {
new_parents.extend(parents.iter().cloned());
}
}
internal_parents.insert(commit.id().clone(), new_parents);
}
internal_parents
}
/// Returns the target commits that are heads of `connected_target_commits`,
/// i.e. have no children inside the connected set.
fn compute_commits_heads(
    target_commit_ids: &IndexSet<CommitId>,
    connected_target_commits: &[Commit],
) -> Vec<CommitId> {
    // Walk children-first; a commit stops being a head candidate as soon as
    // one of its children is seen.
    let mut head_ids: HashSet<CommitId> = HashSet::new();
    for commit in connected_target_commits.iter().rev() {
        head_ids.insert(commit.id().clone());
        for parent_id in commit.parent_ids() {
            head_ids.remove(parent_id);
        }
    }
    // Keep only heads that belong to the target set, in the same order the
    // original walk produced them.
    connected_target_commits
        .iter()
        .rev()
        .map(|commit| commit.id())
        .filter(|id| head_ids.contains(*id) && target_commit_ids.contains(*id))
        .cloned()
        .collect_vec()
}
/// A commit together with a selected subset of its changes.
#[derive(Debug)]
pub struct CommitWithSelection {
    /// The commit the selection was made from.
    pub commit: Commit,
    /// The parent tree with only the selected changes applied.
    pub selected_tree: MergedTree,
    /// The merged tree of the commit's parents.
    pub parent_tree: MergedTree,
}
impl CommitWithSelection {
    /// Returns true if every change in the commit was selected.
    pub fn is_full_selection(&self) -> bool {
        self.commit.tree_ids() == self.selected_tree.tree_ids()
    }

    /// Returns true if nothing was selected (the selected tree equals the
    /// parent tree).
    pub fn is_empty_selection(&self) -> bool {
        self.parent_tree.tree_ids() == self.selected_tree.tree_ids()
    }

    /// Builds a labeled diff from the parent tree to the selected tree, for
    /// use as a merge input.
    pub async fn diff_with_labels(
        &self,
        parent_tree_label: &str,
        selected_tree_label: &str,
        full_selection_label: &str,
    ) -> BackendResult<Diff<(MergedTree, String)>> {
        let parent_tree_label = format!(
            "{} ({parent_tree_label})",
            self.commit.parents_conflict_label().await?
        );
        let commit_label = self.commit.conflict_label();
        // A full selection is labeled as the whole revision; a partial one
        // mentions where the selection came from.
        let selected_tree_label = if !self.is_full_selection() {
            format!("{selected_tree_label} (from {commit_label})")
        } else {
            format!("{commit_label} ({full_selection_label})")
        };
        Ok(Diff::new(
            (self.parent_tree.clone(), parent_tree_label),
            (self.selected_tree.clone(), selected_tree_label),
        ))
    }
}
/// The result of squashing commits into a destination; the new destination
/// commit has not been written yet.
#[must_use]
pub struct SquashedCommit<'repo> {
    /// Builder for the new destination commit.
    pub commit_builder: CommitBuilder<'repo>,
    /// Source commits that were abandoned because their entire content was
    /// squashed away.
    pub abandoned_commits: Vec<Commit>,
}
/// Squashes the selected changes of `sources` into `destination`. Returns
/// `None` if no source contributed any changes; otherwise returns a builder
/// for the new destination commit plus the abandoned sources.
pub async fn squash_commits<'repo>(
    repo: &'repo mut MutableRepo,
    sources: &[CommitWithSelection],
    destination: &Commit,
    keep_emptied: bool,
) -> BackendResult<Option<SquashedCommit<'repo>>> {
    // A source along with its selected diff and whether it will be abandoned.
    struct SourceCommit<'a> {
        commit: &'a CommitWithSelection,
        diff: Diff<(MergedTree, String)>,
        abandon: bool,
    }
    let mut source_commits = vec![];
    for source in sources {
        // A fully-selected source is abandoned (unless emptied commits are
        // kept); an unselected source contributes nothing and is skipped.
        let abandon = !keep_emptied && source.is_full_selection();
        if !abandon && source.is_empty_selection() {
            continue;
        }
        source_commits.push(SourceCommit {
            commit: source,
            diff: source
                .diff_with_labels(
                    "parents of squashed revision",
                    "selected changes for squash",
                    "squashed revision",
                )
                .await?,
            abandon,
        });
    }
    if source_commits.is_empty() {
        return Ok(None);
    }
    let mut abandoned_commits = vec![];
    for source in &source_commits {
        if source.abandon {
            repo.record_abandoned_commit(&source.commit.commit);
            abandoned_commits.push(source.commit.commit.clone());
        } else {
            // Remove the selected changes from the source commit by applying
            // the inverted selection diff to its tree.
            let source_tree = source.commit.commit.tree();
            let new_source_tree = MergedTree::merge(Merge::from_diffs(
                (source_tree, source.commit.commit.conflict_label()),
                [source.diff.clone().invert()],
            ))
            .await?;
            repo.rewrite_commit(&source.commit.commit)
                .set_tree(new_source_tree)
                .write()
                .await?;
        }
    }
    // If the destination descends from a source, rebasing descendants will
    // rewrite the destination itself; track its rewritten version.
    let mut rewritten_destination = destination.clone();
    if fallible_any(sources, |source| {
        repo.index()
            .is_ancestor(source.commit.id(), destination.id())
    })
    .map_err(|err| BackendError::Other(err.into()))?
    {
        let options = RebaseOptions::default();
        repo.rebase_descendants_with_options(&options, |old_commit, rebased_commit| {
            if old_commit.id() != destination.id() {
                return;
            }
            rewritten_destination = match rebased_commit {
                RebasedCommit::Rewritten(commit) => commit,
                // Default options use `EmptyBehavior::Keep`, so nothing is
                // abandoned here.
                RebasedCommit::Abandoned { .. } => panic!("all commits should be kept"),
            };
        })
        .await?;
    }
    // The new destination's predecessors: the old destination plus every
    // squashed source.
    let mut predecessors = vec![destination.id().clone()];
    predecessors.extend(
        source_commits
            .iter()
            .map(|source| source.commit.commit.id().clone()),
    );
    // Apply all selected diffs onto the (possibly rewritten) destination tree.
    let destination_tree = MergedTree::merge(Merge::from_diffs(
        (
            rewritten_destination.tree(),
            format!("{} (squash destination)", destination.conflict_label()),
        ),
        source_commits.into_iter().map(|source| source.diff),
    ))
    .await?;
    let commit_builder = repo
        .rewrite_commit(&rewritten_destination)
        .set_tree(destination_tree)
        .set_predecessors(predecessors);
    Ok(Some(SquashedCommit {
        commit_builder,
        abandoned_commits,
    }))
}
/// Finds target commits that, once moved onto `new_parent_ids`, would exactly
/// duplicate another visible commit with the same change id in the new
/// ancestry (same tree after rebasing onto that commit's parents).
pub async fn find_duplicate_divergent_commits(
    repo: &dyn Repo,
    new_parent_ids: &[CommitId],
    target: &MoveCommitsTarget,
) -> BackendResult<Vec<Commit>> {
    let target_commits: Vec<Commit> = match target {
        MoveCommitsTarget::Commits(commit_ids) => {
            try_join_all(
                commit_ids
                    .iter()
                    .map(|commit_id| repo.store().get_commit_async(commit_id)),
            )
            .await?
        }
        MoveCommitsTarget::Roots(root_ids) => RevsetExpression::commits(root_ids.clone())
            .descendants()
            .evaluate(repo)
            .map_err(|err| err.into_backend_error())?
            .stream()
            .commits(repo.store())
            .try_collect()
            .await
            .map_err(|err| err.into_backend_error())?,
    };
    let target_commit_ids: HashSet<&CommitId> = target_commits.iter().map(Commit::id).collect();
    // Pair each target with the other visible commits sharing its change id
    // (its divergent siblings), excluding commits in the target set itself.
    let divergent_changes: Vec<(&Commit, Vec<CommitId>)> = target_commits
        .iter()
        .map(|target_commit| -> Result<_, BackendError> {
            let mut ancestor_candidates = repo
                .resolve_change_id(target_commit.change_id())
                .map_err(|err| BackendError::Other(err.into()))?
                .and_then(ResolvedChangeTargets::into_visible)
                .unwrap_or_default();
            ancestor_candidates.retain(|commit_id| !target_commit_ids.contains(commit_id));
            Ok((target_commit, ancestor_candidates))
        })
        .filter_ok(|(_, candidates)| !candidates.is_empty())
        .try_collect()?;
    if divergent_changes.is_empty() {
        return Ok(Vec::new());
    }
    let target_root_ids = match target {
        MoveCommitsTarget::Commits(commit_ids) => commit_ids,
        MoveCommitsTarget::Roots(root_ids) => root_ids,
    };
    // Predicate for commits in `target_roots..new_parents`: ancestors the
    // moved commits will gain at the new location.
    let is_new_ancestor = RevsetExpression::commits(target_root_ids.clone())
        .range(&RevsetExpression::commits(new_parent_ids.to_owned()))
        .evaluate(repo)
        .map_err(|err| err.into_backend_error())?
        .containing_fn();
    let mut duplicate_divergent = Vec::new();
    for (target_commit, ancestor_candidates) in divergent_changes {
        for ancestor_candidate_id in ancestor_candidates {
            // Only candidates that will be in the new ancestry matter.
            if !is_new_ancestor(&ancestor_candidate_id).map_err(|err| err.into_backend_error())? {
                continue;
            }
            let ancestor_candidate = repo
                .store()
                .get_commit_async(&ancestor_candidate_id)
                .await?;
            // Rebase the target onto the candidate's parents; identical trees
            // mean the moved commit would be an exact duplicate.
            let new_tree =
                rebase_to_dest_parent(repo, slice::from_ref(target_commit), &ancestor_candidate)
                    .await?;
            if new_tree.tree_ids() == ancestor_candidate.tree_ids() {
                duplicate_divergent.push(target_commit.clone());
                break;
            }
        }
    }
    Ok(duplicate_divergent)
}