1#![expect(missing_docs)]
16
17use std::collections::HashMap;
18use std::collections::HashSet;
19use std::slice;
20use std::sync::Arc;
21
22use futures::StreamExt as _;
23use futures::future::try_join_all;
24use futures::try_join;
25use indexmap::IndexMap;
26use indexmap::IndexSet;
27use itertools::Itertools as _;
28use pollster::FutureExt as _;
29use tracing::instrument;
30
31use crate::backend::BackendError;
32use crate::backend::BackendResult;
33use crate::backend::CommitId;
34use crate::backend::MergedTreeId;
35use crate::commit::Commit;
36use crate::commit::CommitIteratorExt as _;
37use crate::commit_builder::CommitBuilder;
38use crate::index::Index;
39use crate::index::IndexResult;
40use crate::iter_util::fallible_any;
41use crate::matchers::Matcher;
42use crate::matchers::Visit;
43use crate::merge::Merge;
44use crate::merged_tree::MergedTree;
45use crate::merged_tree::MergedTreeBuilder;
46use crate::merged_tree::TreeDiffEntry;
47use crate::repo::MutableRepo;
48use crate::repo::Repo;
49use crate::repo_path::RepoPath;
50use crate::revset::RevsetExpression;
51use crate::revset::RevsetIteratorExt as _;
52use crate::store::Store;
53
54#[instrument(skip(repo))]
56pub async fn merge_commit_trees(repo: &dyn Repo, commits: &[Commit]) -> BackendResult<MergedTree> {
57 if let [commit] = commits {
58 commit.tree_async().await
59 } else {
60 merge_commit_trees_no_resolve_without_repo(repo.store(), repo.index(), commits)
61 .await?
62 .resolve()
63 .await
64 }
65}
66
/// Merges the trees of `commits` without attempting to resolve file
/// conflicts; the returned tree may contain unresolved conflicts.
#[instrument(skip(index))]
pub async fn merge_commit_trees_no_resolve_without_repo(
    store: &Arc<Store>,
    index: &dyn Index,
    commits: &[Commit],
) -> BackendResult<MergedTree> {
    let commit_ids = commits
        .iter()
        .map(|commit| commit.id().clone())
        .collect_vec();
    // Determine which commits participate in the (possibly recursive) merge.
    let commit_id_merge = find_recursive_merge_commits(store, index, commit_ids)?;
    // Load the tree for each side of the merge.
    let tree_merge = commit_id_merge
        .try_map_async(async |commit_id| {
            let commit = store.get_commit_async(commit_id).await?;
            let tree = commit.tree_async().await?;
            Ok::<_, BackendError>(tree.into_merge())
        })
        .await?;
    // Flatten the nested merge and drop trivially-equal sides.
    Ok(MergedTree::new(tree_merge.flatten().simplify()))
}
88
89pub fn find_recursive_merge_commits(
91 store: &Arc<Store>,
92 index: &dyn Index,
93 mut commit_ids: Vec<CommitId>,
94) -> BackendResult<Merge<CommitId>> {
95 if commit_ids.is_empty() {
96 Ok(Merge::resolved(store.root_commit_id().clone()))
97 } else if commit_ids.len() == 1 {
98 Ok(Merge::resolved(commit_ids.pop().unwrap()))
99 } else {
100 let mut result = Merge::resolved(commit_ids[0].clone());
101 for (i, other_commit_id) in commit_ids.iter().enumerate().skip(1) {
102 let ancestor_ids = index
103 .common_ancestors(&commit_ids[0..i], &commit_ids[i..][..1])
104 .map_err(|err| BackendError::Other(err.into()))?;
106 let ancestor_merge = find_recursive_merge_commits(store, index, ancestor_ids)?;
107 result = Merge::from_vec(vec![
108 result,
109 ancestor_merge,
110 Merge::resolved(other_commit_id.clone()),
111 ])
112 .flatten();
113 }
114 Ok(result)
115 }
116}
117
/// Restores the paths matched by `matcher` in `destination` to their state
/// in `source`, returning the id of the resulting tree.
pub async fn restore_tree(
    source: &MergedTree,
    destination: &MergedTree,
    matcher: &dyn Matcher,
) -> BackendResult<MergedTreeId> {
    if matcher.visit(RepoPath::root()) == Visit::AllRecursively {
        // Everything is restored, so the result is simply the source tree.
        Ok(source.id())
    } else {
        // Start from the destination tree and copy over the source-side
        // value for every differing path the matcher selects.
        let mut tree_builder = MergedTreeBuilder::new(destination.id().clone());
        let mut diff_stream = source.diff_stream(destination, matcher);
        while let Some(TreeDiffEntry {
            path: repo_path,
            values,
        }) = diff_stream.next().await
        {
            // `before` is the value on the source side of the diff.
            let source_value = values?.before;
            tree_builder.set_or_remove(repo_path, source_value);
        }
        tree_builder.write_tree(destination.store())
    }
}
144
145pub async fn rebase_commit(
146 mut_repo: &mut MutableRepo,
147 old_commit: Commit,
148 new_parents: Vec<CommitId>,
149) -> BackendResult<Commit> {
150 let rewriter = CommitRewriter::new(mut_repo, old_commit, new_parents);
151 let builder = rewriter.rebase().await?;
152 builder.write()
153}
154
/// Helper for rewriting a single commit onto a new set of parents.
pub struct CommitRewriter<'repo> {
    /// Repository the rewritten commit will be recorded in.
    mut_repo: &'repo mut MutableRepo,
    /// The commit being rewritten.
    old_commit: Commit,
    /// The parents the rewritten commit will have.
    new_parents: Vec<CommitId>,
}
161
impl<'repo> CommitRewriter<'repo> {
    /// Creates a rewriter for `old_commit`, to be rewritten onto
    /// `new_parents`.
    pub fn new(
        mut_repo: &'repo mut MutableRepo,
        old_commit: Commit,
        new_parents: Vec<CommitId>,
    ) -> Self {
        Self {
            mut_repo,
            old_commit,
            new_parents,
        }
    }

    /// Returns the underlying mutable repository.
    pub fn repo_mut(&mut self) -> &mut MutableRepo {
        self.mut_repo
    }

    /// The commit being rewritten.
    pub fn old_commit(&self) -> &Commit {
        &self.old_commit
    }

    /// The parents the rewritten commit will have.
    pub fn new_parents(&self) -> &[CommitId] {
        &self.new_parents
    }

    /// Replaces the new parents wholesale.
    pub fn set_new_parents(&mut self, new_parents: Vec<CommitId>) {
        self.new_parents = new_parents;
    }

    /// Sets the new parents to the rewritten versions of
    /// `unrewritten_parents`, as recorded in the repository.
    pub fn set_new_rewritten_parents(&mut self, unrewritten_parents: &[CommitId]) {
        self.new_parents = self.mut_repo.new_parents(unrewritten_parents);
    }

    /// Replaces `old_parent` (if present) in the new-parents list by
    /// `new_parents`, then deduplicates the list.
    pub fn replace_parent<'a>(
        &mut self,
        old_parent: &CommitId,
        new_parents: impl IntoIterator<Item = &'a CommitId>,
    ) {
        if let Some(i) = self.new_parents.iter().position(|p| p == old_parent) {
            self.new_parents
                .splice(i..i + 1, new_parents.into_iter().cloned());
            // Deduplicate, keeping the first occurrence of each parent id.
            let mut unique = HashSet::new();
            self.new_parents.retain(|p| unique.insert(p.clone()));
        }
    }

    /// Whether the new parents differ from the old commit's parents.
    pub fn parents_changed(&self) -> bool {
        self.new_parents != self.old_commit.parent_ids()
    }

    /// Removes new parents that are ancestors of other new parents, so that
    /// e.g. merging a commit with its own ancestor collapses to just the
    /// descendant.
    pub fn simplify_ancestor_merge(&mut self) -> IndexResult<()> {
        // `heads` returns only the parents that are not ancestors of others.
        let head_set: HashSet<_> = self
            .mut_repo
            .index()
            .heads(&mut self.new_parents.iter())?
            .into_iter()
            .collect();
        self.new_parents.retain(|parent| head_set.contains(parent));
        Ok(())
    }

    /// Records the old commit as abandoned; its children will be adopted by
    /// the new parents.
    pub fn abandon(self) {
        let old_commit_id = self.old_commit.id().clone();
        let new_parents = self.new_parents;
        self.mut_repo
            .record_abandoned_commit_with_parents(old_commit_id, new_parents);
    }

    /// Rebases the old commit onto the new parents. Returns `None` if the
    /// commit was abandoned according to `empty`, otherwise a builder for
    /// the rewritten commit (which the caller must write).
    pub async fn rebase_with_empty_behavior(
        self,
        empty: EmptyBehavior,
    ) -> BackendResult<Option<CommitBuilder<'repo>>> {
        let old_parents_fut = self.old_commit.parents_async();
        let new_parents_fut = try_join_all(
            self.new_parents
                .iter()
                .map(|new_parent_id| self.mut_repo.store().get_commit_async(new_parent_id)),
        );
        let (old_parents, new_parents) = try_join!(old_parents_fut, new_parents_fut)?;
        let old_parent_trees = old_parents
            .iter()
            .map(|parent| parent.tree_id().clone())
            .collect_vec();
        let new_parent_trees = new_parents
            .iter()
            .map(|parent| parent.tree_id().clone())
            .collect_vec();

        let (was_empty, new_tree_id) = if new_parent_trees == old_parent_trees {
            (
                // Parent trees are unchanged, so the tree is reused as-is and
                // `was_empty` is set such that `AbandonNewlyEmpty` (which
                // requires `!was_empty`) never abandons in this branch.
                true,
                // Optimization: skip the three-way merge entirely.
                self.old_commit.tree_id().clone(),
            )
        } else {
            // Three-way merge: new base + (old tree - old base).
            let old_base_tree_fut = merge_commit_trees(self.mut_repo, &old_parents);
            let new_base_tree_fut = merge_commit_trees(self.mut_repo, &new_parents);
            let old_tree_fut = self.old_commit.tree_async();
            let (old_base_tree, new_base_tree, old_tree) =
                try_join!(old_base_tree_fut, new_base_tree_fut, old_tree_fut)?;
            (
                old_base_tree.id() == *self.old_commit.tree_id(),
                new_base_tree.merge(old_base_tree, old_tree).await?.id(),
            )
        };
        // Abandoning is only considered when there is exactly one new parent.
        if let [parent] = &new_parents[..] {
            let should_abandon = match empty {
                EmptyBehavior::Keep => false,
                EmptyBehavior::AbandonNewlyEmpty => *parent.tree_id() == new_tree_id && !was_empty,
                EmptyBehavior::AbandonAllEmpty => *parent.tree_id() == new_tree_id,
            };
            if should_abandon {
                self.abandon();
                return Ok(None);
            }
        }

        let builder = self
            .mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
            .set_tree_id(new_tree_id);
        Ok(Some(builder))
    }

    /// Rebases the old commit onto the new parents, always keeping the
    /// commit even if it becomes empty.
    pub async fn rebase(self) -> BackendResult<CommitBuilder<'repo>> {
        let builder = self.rebase_with_empty_behavior(EmptyBehavior::Keep).await?;
        // `Keep` never abandons, so a builder is always returned.
        Ok(builder.unwrap())
    }

    /// Rewrites the commit to have the new parents, keeping its tree
    /// unchanged (no rebasing of content).
    pub fn reparent(self) -> CommitBuilder<'repo> {
        self.mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
    }
}
327
/// The outcome of rebasing a single commit.
pub enum RebasedCommit {
    /// The commit was rewritten as this new commit.
    Rewritten(Commit),
    /// The commit was abandoned (e.g. because it became empty); its children
    /// are adopted by `parent_id`.
    Abandoned { parent_id: CommitId },
}
332
333pub fn rebase_commit_with_options(
334 mut rewriter: CommitRewriter<'_>,
335 options: &RebaseOptions,
336) -> BackendResult<RebasedCommit> {
337 if options.simplify_ancestor_merge {
339 rewriter
340 .simplify_ancestor_merge()
341 .map_err(|err| BackendError::Other(err.into()))?;
343 }
344
345 let single_parent = match &rewriter.new_parents[..] {
346 [parent_id] => Some(parent_id.clone()),
347 _ => None,
348 };
349 let new_parents_len = rewriter.new_parents.len();
350 if let Some(builder) = rewriter
351 .rebase_with_empty_behavior(options.empty)
352 .block_on()?
353 {
354 let new_commit = builder.write()?;
355 Ok(RebasedCommit::Rewritten(new_commit))
356 } else {
357 assert_eq!(new_parents_len, 1);
358 Ok(RebasedCommit::Abandoned {
359 parent_id: single_parent.unwrap(),
360 })
361 }
362}
363
364pub fn rebase_to_dest_parent(
366 repo: &dyn Repo,
367 sources: &[Commit],
368 destination: &Commit,
369) -> BackendResult<MergedTree> {
370 if let [source] = sources
371 && source.parent_ids() == destination.parent_ids()
372 {
373 return source.tree();
374 }
375 sources.iter().try_fold(
376 destination.parent_tree(repo)?,
377 |destination_tree, source| {
378 let source_parent_tree = source.parent_tree(repo)?;
379 let source_tree = source.tree()?;
380 destination_tree
381 .merge(source_parent_tree, source_tree)
382 .block_on()
383 },
384 )
385}
386
/// Controls whether a rebased commit whose tree equals its parent's tree
/// (i.e. an empty commit) is kept or abandoned.
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub enum EmptyBehavior {
    /// Always keep the commit, even when it is empty.
    #[default]
    Keep,
    /// Abandon the commit only if it became empty as a result of the rebase
    /// (it was not empty before).
    AbandonNewlyEmpty,
    /// Abandon the commit whenever it ends up empty after the rebase,
    /// regardless of whether it was empty before.
    AbandonAllEmpty,
}
401
/// Options controlling how commits are rebased.
#[derive(Clone, Debug, Default)]
pub struct RebaseOptions {
    /// What to do with commits that end up empty after the rebase.
    pub empty: EmptyBehavior,
    /// How refs pointing to rewritten or abandoned commits are updated.
    pub rewrite_refs: RewriteRefsOptions,
    /// If true, drop new parents that are ancestors of other new parents.
    pub simplify_ancestor_merge: bool,
}
416
/// Options controlling how refs are updated when commits are rewritten.
#[derive(Clone, Debug, Default)]
pub struct RewriteRefsOptions {
    /// Whether bookmarks pointing to an abandoned commit should be deleted
    /// rather than moved to the commit's parent(s).
    // NOTE(review): semantics inferred from the field name; confirm against
    // MutableRepo's ref-rewriting implementation.
    pub delete_abandoned_bookmarks: bool,
}
426
/// Statistics produced by a move-commits operation.
pub struct MoveCommitsStats {
    /// Number of commits in the target set that were rebased.
    pub num_rebased_targets: u32,
    /// Number of commits outside the target set that were rebased.
    pub num_rebased_descendants: u32,
    /// Number of commits whose parents were unchanged, so no rebase was
    /// performed.
    pub num_skipped_rebases: u32,
    /// Number of commits abandoned because they became empty.
    pub num_abandoned_empty: u32,
    /// The rebase outcome for each rewritten commit, keyed by old commit id.
    pub rebased_commits: HashMap<CommitId, RebasedCommit>,
}
440
/// Describes where a set of commits should be moved to.
#[derive(Clone, Debug)]
pub struct MoveCommitsLocation {
    /// Ids of the commits that become parents of the moved target roots.
    pub new_parent_ids: Vec<CommitId>,
    /// Ids of the commits that should become children of the moved target
    /// heads.
    pub new_child_ids: Vec<CommitId>,
    /// Which commits to move.
    pub target: MoveCommitsTarget,
}
448
/// Which commits a move operation applies to.
#[derive(Clone, Debug)]
pub enum MoveCommitsTarget {
    /// Move exactly these commits; descendants outside the set stay behind
    /// and are reparented onto the commits' external parents.
    Commits(Vec<CommitId>),
    /// Move these commits together with all of their descendants.
    Roots(Vec<CommitId>),
}
457
/// A precomputed plan for a move-commits operation, produced by
/// [`compute_move_commits`] and applied via [`ComputedMoveCommits::apply`].
#[derive(Clone, Debug)]
pub struct ComputedMoveCommits {
    /// Ids of the commits being moved.
    target_commit_ids: IndexSet<CommitId>,
    /// Commits (from `find_descendants_for_rebase`) that may need rewriting.
    descendants: Vec<Commit>,
    /// The new parents for each commit in `descendants`.
    commit_new_parents_map: HashMap<CommitId, Vec<CommitId>>,
    /// Commits to abandon instead of rebasing when the plan is applied.
    to_abandon: HashSet<CommitId>,
}
465
impl ComputedMoveCommits {
    /// A plan that moves nothing.
    fn empty() -> Self {
        Self {
            target_commit_ids: IndexSet::new(),
            descendants: vec![],
            commit_new_parents_map: HashMap::new(),
            to_abandon: HashSet::new(),
        }
    }

    /// Marks the given commits to be abandoned (rather than rebased) when
    /// the plan is applied.
    pub fn record_to_abandon(&mut self, commit_ids: impl IntoIterator<Item = CommitId>) {
        self.to_abandon.extend(commit_ids);
    }

    /// Applies the plan to `mut_repo`, rebasing or abandoning all affected
    /// commits.
    pub fn apply(
        self,
        mut_repo: &mut MutableRepo,
        options: &RebaseOptions,
    ) -> BackendResult<MoveCommitsStats> {
        apply_move_commits(mut_repo, self, options)
    }
}
494
495pub fn move_commits(
504 mut_repo: &mut MutableRepo,
505 loc: &MoveCommitsLocation,
506 options: &RebaseOptions,
507) -> BackendResult<MoveCommitsStats> {
508 compute_move_commits(mut_repo, loc)?.apply(mut_repo, options)
509}
510
511pub fn compute_move_commits(
512 repo: &MutableRepo,
513 loc: &MoveCommitsLocation,
514) -> BackendResult<ComputedMoveCommits> {
515 let target_commit_ids: IndexSet<CommitId>;
516 let connected_target_commits: Vec<Commit>;
517 let connected_target_commits_internal_parents: HashMap<CommitId, IndexSet<CommitId>>;
518 let target_roots: HashSet<CommitId>;
519
520 match &loc.target {
521 MoveCommitsTarget::Commits(commit_ids) => {
522 if commit_ids.is_empty() {
523 return Ok(ComputedMoveCommits::empty());
524 }
525
526 target_commit_ids = commit_ids.iter().cloned().collect();
527
528 connected_target_commits = RevsetExpression::commits(commit_ids.clone())
529 .connected()
530 .evaluate(repo)
531 .map_err(|err| err.into_backend_error())?
532 .iter()
533 .commits(repo.store())
534 .try_collect()
535 .map_err(|err| err.into_backend_error())?;
536 connected_target_commits_internal_parents =
537 compute_internal_parents_within(&target_commit_ids, &connected_target_commits);
538
539 target_roots = connected_target_commits_internal_parents
540 .iter()
541 .filter(|&(commit_id, parents)| {
542 target_commit_ids.contains(commit_id) && parents.is_empty()
543 })
544 .map(|(commit_id, _)| commit_id.clone())
545 .collect();
546 }
547 MoveCommitsTarget::Roots(root_ids) => {
548 if root_ids.is_empty() {
549 return Ok(ComputedMoveCommits::empty());
550 }
551
552 target_commit_ids = RevsetExpression::commits(root_ids.clone())
553 .descendants()
554 .evaluate(repo)
555 .map_err(|err| err.into_backend_error())?
556 .iter()
557 .try_collect()
558 .map_err(|err| err.into_backend_error())?;
559
560 connected_target_commits = target_commit_ids
561 .iter()
562 .map(|id| repo.store().get_commit(id))
563 .try_collect()?;
564 connected_target_commits_internal_parents = HashMap::new();
567 target_roots = root_ids.iter().cloned().collect();
568 }
569 }
570
571 let mut target_commits_external_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
575 for id in target_commit_ids.iter().rev() {
576 let commit = repo.store().get_commit(id)?;
577 let mut new_parents = IndexSet::new();
578 for old_parent in commit.parent_ids() {
579 if let Some(parents) = target_commits_external_parents.get(old_parent) {
580 new_parents.extend(parents.iter().cloned());
581 } else {
582 new_parents.insert(old_parent.clone());
583 }
584 }
585 target_commits_external_parents.insert(commit.id().clone(), new_parents);
586 }
587
588 let new_parent_ids: Vec<_> = loc
592 .new_parent_ids
593 .iter()
594 .flat_map(|parent_id| {
595 if let Some(parent_ids) = target_commits_external_parents.get(parent_id) {
596 parent_ids.iter().cloned().collect_vec()
597 } else {
598 vec![parent_id.clone()]
599 }
600 })
601 .collect();
602
603 let new_children: Vec<_> = if loc
607 .new_child_ids
608 .iter()
609 .any(|id| target_commit_ids.contains(id))
610 {
611 let target_commits_descendants: Vec<_> =
612 RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
613 .union(
614 &RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
615 .children(),
616 )
617 .evaluate(repo)
618 .map_err(|err| err.into_backend_error())?
619 .iter()
620 .commits(repo.store())
621 .try_collect()
622 .map_err(|err| err.into_backend_error())?;
623
624 let mut target_commit_external_descendants: HashMap<CommitId, IndexSet<Commit>> =
627 HashMap::new();
628 for commit in &target_commits_descendants {
631 if !target_commit_external_descendants.contains_key(commit.id()) {
632 let children = if target_commit_ids.contains(commit.id()) {
633 IndexSet::new()
634 } else {
635 IndexSet::from([commit.clone()])
636 };
637 target_commit_external_descendants.insert(commit.id().clone(), children);
638 }
639
640 let children = target_commit_external_descendants
641 .get(commit.id())
642 .unwrap()
643 .iter()
644 .cloned()
645 .collect_vec();
646 for parent_id in commit.parent_ids() {
647 if target_commit_ids.contains(parent_id) {
648 if let Some(target_children) =
649 target_commit_external_descendants.get_mut(parent_id)
650 {
651 target_children.extend(children.iter().cloned());
652 } else {
653 target_commit_external_descendants
654 .insert(parent_id.clone(), children.iter().cloned().collect());
655 }
656 };
657 }
658 }
659
660 let mut new_children = Vec::new();
661 for id in &loc.new_child_ids {
662 if let Some(children) = target_commit_external_descendants.get(id) {
663 new_children.extend(children.iter().cloned());
664 } else {
665 new_children.push(repo.store().get_commit(id)?);
666 }
667 }
668 new_children
669 } else {
670 loc.new_child_ids
671 .iter()
672 .map(|id| repo.store().get_commit(id))
673 .try_collect()?
674 };
675
676 let new_children_parents: HashMap<_, _> = if !new_children.is_empty() {
679 let target_heads = compute_commits_heads(&target_commit_ids, &connected_target_commits);
682
683 new_children
684 .iter()
685 .map(|child_commit| {
686 let mut new_child_parent_ids = IndexSet::new();
687 for old_child_parent_id in child_commit.parent_ids() {
688 let old_child_parent_ids = if let Some(parents) =
690 target_commits_external_parents.get(old_child_parent_id)
691 {
692 parents.iter().collect_vec()
693 } else {
694 vec![old_child_parent_id]
695 };
696
697 for id in old_child_parent_ids {
701 if new_parent_ids.contains(id) {
702 new_child_parent_ids.extend(target_heads.clone());
703 } else {
704 new_child_parent_ids.insert(id.clone());
705 };
706 }
707 }
708
709 new_child_parent_ids.extend(target_heads.clone());
712
713 (
714 child_commit.id().clone(),
715 new_child_parent_ids.into_iter().collect_vec(),
716 )
717 })
718 .collect()
719 } else {
720 HashMap::new()
721 };
722
723 let mut roots = target_roots.iter().cloned().collect_vec();
726 roots.extend(new_children.iter().ids().cloned());
727
728 let descendants = repo.find_descendants_for_rebase(roots.clone())?;
729 let commit_new_parents_map = descendants
730 .iter()
731 .map(|commit| -> BackendResult<_> {
732 let commit_id = commit.id();
733 let new_parent_ids =
734 if let Some(new_child_parents) = new_children_parents.get(commit_id) {
735 new_child_parents.clone()
737 } else if target_commit_ids.contains(commit_id) {
738 if target_roots.contains(commit_id) {
740 new_parent_ids.clone()
743 } else {
744 let mut new_parents = vec![];
752 for parent_id in commit.parent_ids() {
753 if target_commit_ids.contains(parent_id) {
754 new_parents.push(parent_id.clone());
755 } else if let Some(parents) =
756 connected_target_commits_internal_parents.get(parent_id)
757 {
758 new_parents.extend(parents.iter().cloned());
759 } else if !fallible_any(&new_children, |child| {
760 repo.index().is_ancestor(child.id(), parent_id)
761 })
762 .map_err(|err| BackendError::Other(err.into()))?
764 {
765 new_parents.push(parent_id.clone());
766 }
767 }
768 new_parents
769 }
770 } else if commit
771 .parent_ids()
772 .iter()
773 .any(|id| target_commits_external_parents.contains_key(id))
774 {
775 let mut new_parents = vec![];
778 for parent in commit.parent_ids() {
779 if let Some(parents) = target_commits_external_parents.get(parent) {
780 new_parents.extend(parents.iter().cloned());
781 } else {
782 new_parents.push(parent.clone());
783 }
784 }
785 new_parents
786 } else {
787 commit.parent_ids().iter().cloned().collect_vec()
788 };
789 Ok((commit.id().clone(), new_parent_ids))
790 })
791 .try_collect()?;
792
793 Ok(ComputedMoveCommits {
794 target_commit_ids,
795 descendants,
796 commit_new_parents_map,
797 to_abandon: HashSet::new(),
798 })
799}
800
/// Applies a precomputed move plan: rebases or abandons every affected
/// commit in `mut_repo` and collects statistics.
fn apply_move_commits(
    mut_repo: &mut MutableRepo,
    commits: ComputedMoveCommits,
    options: &RebaseOptions,
) -> BackendResult<MoveCommitsStats> {
    let mut num_rebased_targets = 0;
    let mut num_rebased_descendants = 0;
    let mut num_skipped_rebases = 0;
    let mut num_abandoned_empty = 0;

    // The empty-commit behavior only applies to the target commits
    // themselves; descendants are always kept.
    let rebase_descendant_options = &RebaseOptions {
        empty: EmptyBehavior::Keep,
        rewrite_refs: options.rewrite_refs.clone(),
        simplify_ancestor_merge: options.simplify_ancestor_merge,
    };

    let mut rebased_commits: HashMap<CommitId, RebasedCommit> = HashMap::new();
    mut_repo.transform_commits(
        commits.descendants,
        &commits.commit_new_parents_map,
        &options.rewrite_refs,
        async |rewriter| {
            let old_commit_id = rewriter.old_commit().id().clone();
            if commits.to_abandon.contains(&old_commit_id) {
                rewriter.abandon();
            } else if rewriter.parents_changed() {
                let is_target_commit = commits.target_commit_ids.contains(&old_commit_id);
                let rebased_commit = rebase_commit_with_options(
                    rewriter,
                    if is_target_commit {
                        options
                    } else {
                        rebase_descendant_options
                    },
                )?;
                if let RebasedCommit::Abandoned { .. } = rebased_commit {
                    num_abandoned_empty += 1;
                } else if is_target_commit {
                    num_rebased_targets += 1;
                } else {
                    num_rebased_descendants += 1;
                }
                rebased_commits.insert(old_commit_id, rebased_commit);
            } else {
                // Parents unchanged: nothing to do for this commit.
                num_skipped_rebases += 1;
            }

            Ok(())
        },
    )?;

    Ok(MoveCommitsStats {
        num_rebased_targets,
        num_rebased_descendants,
        num_skipped_rebases,
        num_abandoned_empty,
        rebased_commits,
    })
}
861
/// Statistics produced by a duplicate-commits operation.
#[derive(Default)]
pub struct DuplicateCommitsStats {
    /// Map from original commit id to the duplicated commit.
    pub duplicated_commits: IndexMap<CommitId, Commit>,
    /// Number of commits rebased as a result of placing the duplicates.
    pub num_rebased: u32,
}
870
/// Duplicates the commits in `target_commit_ids`, placing the duplicates
/// onto `parent_commit_ids` and making them parents of
/// `children_commit_ids` (whose descendants are rebased accordingly).
///
/// Descriptions can be overridden per commit via `target_descriptions`.
/// Returns the original-to-duplicate mapping and the number of rebased
/// commits.
pub async fn duplicate_commits(
    mut_repo: &mut MutableRepo,
    target_commit_ids: &[CommitId],
    target_descriptions: &HashMap<CommitId, String>,
    parent_commit_ids: &[CommitId],
    children_commit_ids: &[CommitId],
) -> BackendResult<DuplicateCommitsStats> {
    if target_commit_ids.is_empty() {
        return Ok(DuplicateCommitsStats::default());
    }

    let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();
    let mut num_rebased = 0;

    let target_commit_ids: IndexSet<_> = target_commit_ids.iter().cloned().collect();

    let connected_target_commits: Vec<_> =
        RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
            .connected()
            .evaluate(mut_repo)
            .map_err(|err| err.into_backend_error())?
            .iter()
            .commits(mut_repo.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;

    // Parent relationships among the target commits themselves, restricted
    // to commits that are in the target set.
    let target_commits_internal_parents = {
        let mut target_commits_internal_parents =
            compute_internal_parents_within(&target_commit_ids, &connected_target_commits);
        target_commits_internal_parents.retain(|id, _| target_commit_ids.contains(id));
        target_commits_internal_parents
    };

    // Target commits without internal parents are the roots; they are
    // duplicated directly onto `parent_commit_ids`.
    let target_root_ids: HashSet<_> = target_commits_internal_parents
        .iter()
        .filter(|(_, parents)| parents.is_empty())
        .map(|(commit_id, _)| commit_id.clone())
        .collect();

    // The target heads become parents of the new children, so they are only
    // needed when there are children.
    let target_head_ids = if !children_commit_ids.is_empty() {
        compute_commits_heads(&target_commit_ids, &connected_target_commits)
    } else {
        vec![]
    };

    // Duplicate ancestors before descendants so a commit's duplicated
    // parents already exist when the commit itself is duplicated.
    for original_commit_id in target_commit_ids.iter().rev() {
        let original_commit = mut_repo
            .store()
            .get_commit_async(original_commit_id)
            .await?;
        let new_parent_ids = if target_root_ids.contains(original_commit_id) {
            parent_commit_ids.to_vec()
        } else {
            target_commits_internal_parents
                .get(original_commit_id)
                .unwrap()
                .iter()
                .map(|id| {
                    // Use the duplicated version of the parent if it exists.
                    duplicated_old_to_new
                        .get(id)
                        .map_or(id, |commit| commit.id())
                        .clone()
                })
                .collect()
        };
        let mut new_commit_builder = CommitRewriter::new(mut_repo, original_commit, new_parent_ids)
            .rebase()
            .await?
            .clear_rewrite_source()
            .generate_new_change_id();
        if let Some(desc) = target_descriptions.get(original_commit_id) {
            new_commit_builder = new_commit_builder.set_description(desc);
        }
        duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?);
    }

    // Replace the target heads by their duplicates; these become the
    // children's new parents.
    let target_head_ids = target_head_ids
        .into_iter()
        .map(|commit_id| {
            duplicated_old_to_new
                .get(&commit_id)
                .map_or(commit_id, |commit| commit.id().clone())
        })
        .collect_vec();

    // Rebase the new children (and their descendants) onto the duplicated
    // heads.
    let children_commit_ids_set: HashSet<CommitId> = children_commit_ids.iter().cloned().collect();
    mut_repo.transform_descendants(children_commit_ids.to_vec(), async |mut rewriter| {
        if children_commit_ids_set.contains(rewriter.old_commit().id()) {
            let mut child_new_parent_ids = IndexSet::new();
            for old_parent_id in rewriter.old_commit().parent_ids() {
                // A parent the duplicates were placed onto is replaced by
                // the duplicated heads; other parents are kept.
                if parent_commit_ids.contains(old_parent_id) {
                    child_new_parent_ids.extend(target_head_ids.clone());
                } else {
                    child_new_parent_ids.insert(old_parent_id.clone());
                }
            }
            // The child must in any case become a descendant of the
            // duplicated heads.
            child_new_parent_ids.extend(target_head_ids.clone());
            rewriter.set_new_parents(child_new_parent_ids.into_iter().collect());
        }
        num_rebased += 1;
        rewriter.rebase().await?.write()?;
        Ok(())
    })?;

    Ok(DuplicateCommitsStats {
        duplicated_commits: duplicated_old_to_new,
        num_rebased,
    })
}
1018
/// Duplicates each of `target_commits` in place, i.e. onto its own
/// (possibly already duplicated) parents, without rebasing any descendants.
///
/// Descriptions can be overridden per commit via `target_descriptions`.
pub fn duplicate_commits_onto_parents(
    mut_repo: &mut MutableRepo,
    target_commits: &[CommitId],
    target_descriptions: &HashMap<CommitId, String>,
) -> BackendResult<DuplicateCommitsStats> {
    if target_commits.is_empty() {
        return Ok(DuplicateCommitsStats::default());
    }

    let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();

    // Process ancestors before descendants so a duplicated parent exists by
    // the time its child is duplicated.
    for original_commit_id in target_commits.iter().rev() {
        let original_commit = mut_repo.store().get_commit(original_commit_id)?;
        let new_parent_ids = original_commit
            .parent_ids()
            .iter()
            .map(|id| {
                // Use the duplicated version of the parent if it exists.
                duplicated_old_to_new
                    .get(id)
                    .map_or(id, |commit| commit.id())
                    .clone()
            })
            .collect();
        let mut new_commit_builder = mut_repo
            .rewrite_commit(&original_commit)
            .clear_rewrite_source()
            .generate_new_change_id()
            .set_parents(new_parent_ids);
        if let Some(desc) = target_descriptions.get(original_commit_id) {
            new_commit_builder = new_commit_builder.set_description(desc);
        }
        duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?);
    }

    Ok(DuplicateCommitsStats {
        duplicated_commits: duplicated_old_to_new,
        num_rebased: 0,
    })
}
1069
1070fn compute_internal_parents_within(
1078 target_commit_ids: &IndexSet<CommitId>,
1079 graph_commits: &[Commit],
1080) -> HashMap<CommitId, IndexSet<CommitId>> {
1081 let mut internal_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
1082 for commit in graph_commits.iter().rev() {
1083 let mut new_parents = IndexSet::new();
1086 for old_parent in commit.parent_ids() {
1087 if target_commit_ids.contains(old_parent) {
1088 new_parents.insert(old_parent.clone());
1089 } else if let Some(parents) = internal_parents.get(old_parent) {
1090 new_parents.extend(parents.iter().cloned());
1091 }
1092 }
1093 internal_parents.insert(commit.id().clone(), new_parents);
1094 }
1095 internal_parents
1096}
1097
1098fn compute_commits_heads(
1104 target_commit_ids: &IndexSet<CommitId>,
1105 connected_target_commits: &[Commit],
1106) -> Vec<CommitId> {
1107 let mut target_head_ids: HashSet<CommitId> = HashSet::new();
1108 for commit in connected_target_commits.iter().rev() {
1109 target_head_ids.insert(commit.id().clone());
1110 for old_parent in commit.parent_ids() {
1111 target_head_ids.remove(old_parent);
1112 }
1113 }
1114 connected_target_commits
1115 .iter()
1116 .rev()
1117 .filter(|commit| {
1118 target_head_ids.contains(commit.id()) && target_commit_ids.contains(commit.id())
1119 })
1120 .map(|commit| commit.id().clone())
1121 .collect_vec()
1122}
1123
/// A commit together with a partial selection of its changes.
pub struct CommitWithSelection {
    /// The commit the selection was made from.
    pub commit: Commit,
    /// The parent tree with only the selected changes applied.
    pub selected_tree: MergedTree,
    /// The tree of the commit's parent(s).
    pub parent_tree: MergedTree,
}
1129
1130impl CommitWithSelection {
1131 pub fn is_full_selection(&self) -> bool {
1133 &self.selected_tree.id() == self.commit.tree_id()
1134 }
1135
1136 pub fn is_empty_selection(&self) -> bool {
1142 self.selected_tree.id() == self.parent_tree.id()
1143 }
1144}
1145
/// The result of squashing commits into a destination.
#[must_use]
pub struct SquashedCommit<'repo> {
    /// Builder for the rewritten destination commit; the caller must write
    /// it for the squash to take effect.
    pub commit_builder: CommitBuilder<'repo>,
    /// Source commits that were abandoned because their whole change was
    /// squashed away.
    pub abandoned_commits: Vec<Commit>,
}
1154
/// Moves the selected changes of each of `sources` into `destination`.
///
/// A source whose full tree is selected is abandoned (unless
/// `keep_emptied`); a source with an empty selection is skipped. Returns
/// `None` when there is nothing to squash.
pub fn squash_commits<'repo>(
    repo: &'repo mut MutableRepo,
    sources: &[CommitWithSelection],
    destination: &Commit,
    keep_emptied: bool,
) -> BackendResult<Option<SquashedCommit<'repo>>> {
    // A source paired with whether it should be abandoned after squashing.
    struct SourceCommit<'a> {
        commit: &'a CommitWithSelection,
        abandon: bool,
    }
    let mut source_commits = vec![];
    for source in sources {
        let abandon = !keep_emptied && source.is_full_selection();
        if !abandon && source.is_empty_selection() {
            // Nothing selected from this source; leave it untouched.
            continue;
        }

        source_commits.push(SourceCommit {
            commit: source,
            abandon,
        });
    }

    if source_commits.is_empty() {
        return Ok(None);
    }

    let mut abandoned_commits = vec![];
    for source in &source_commits {
        if source.abandon {
            repo.record_abandoned_commit(&source.commit.commit);
            abandoned_commits.push(source.commit.commit.clone());
        } else {
            // Remove the selected changes from the source commit by applying
            // the reverse diff (selected tree -> parent tree) to its tree.
            let source_tree = source.commit.commit.tree()?;
            let new_source_tree = source_tree
                .merge(
                    source.commit.selected_tree.clone(),
                    source.commit.parent_tree.clone(),
                )
                .block_on()?;
            repo.rewrite_commit(&source.commit.commit)
                .set_tree_id(new_source_tree.id().clone())
                .write()?;
        }
    }

    // If the destination descends from any source, rewriting the sources
    // also rewrites the destination; rebase descendants now and capture the
    // destination's rewritten version.
    let mut rewritten_destination = destination.clone();
    if fallible_any(sources, |source| {
        repo.index()
            .is_ancestor(source.commit.id(), destination.id())
    })
    .map_err(|err| BackendError::Other(err.into()))?
    {
        let options = RebaseOptions::default();
        repo.rebase_descendants_with_options(&options, |old_commit, rebased_commit| {
            if old_commit.id() != destination.id() {
                return;
            }
            rewritten_destination = match rebased_commit {
                RebasedCommit::Rewritten(commit) => commit,
                // Default options use EmptyBehavior::Keep, so nothing can be
                // abandoned here.
                RebasedCommit::Abandoned { .. } => panic!("all commits should be kept"),
            };
        })?;
    }
    // Apply each source's selected diff (parent tree -> selected tree) on
    // top of the destination tree.
    let mut destination_tree = rewritten_destination.tree()?;
    for source in &source_commits {
        destination_tree = destination_tree
            .merge(
                source.commit.parent_tree.clone(),
                source.commit.selected_tree.clone(),
            )
            .block_on()?;
    }
    let mut predecessors = vec![destination.id().clone()];
    predecessors.extend(
        source_commits
            .iter()
            .map(|source| source.commit.commit.id().clone()),
    );

    let commit_builder = repo
        .rewrite_commit(&rewritten_destination)
        .set_tree_id(destination_tree.id().clone())
        .set_predecessors(predecessors);
    Ok(Some(SquashedCommit {
        commit_builder,
        abandoned_commits,
    }))
}
1259
/// Finds commits in `target` that would become duplicates of existing
/// commits if moved onto `new_parent_ids`.
///
/// A target commit counts as a duplicate when another commit with the same
/// change id lies in the destination's ancestry and would have an identical
/// tree after the target is rebased next to it.
pub fn find_duplicate_divergent_commits(
    repo: &dyn Repo,
    new_parent_ids: &[CommitId],
    target: &MoveCommitsTarget,
) -> BackendResult<Vec<Commit>> {
    let target_commits: Vec<Commit> = match target {
        MoveCommitsTarget::Commits(commit_ids) => commit_ids
            .iter()
            .map(|commit_id| repo.store().get_commit(commit_id))
            .try_collect()?,
        MoveCommitsTarget::Roots(root_ids) => RevsetExpression::commits(root_ids.clone())
            .descendants()
            .evaluate(repo)
            .map_err(|err| err.into_backend_error())?
            .iter()
            .commits(repo.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?,
    };
    let target_commit_ids: HashSet<&CommitId> = target_commits.iter().map(Commit::id).collect();

    // For each target commit, the other commits sharing its change id
    // (divergent copies outside the target set).
    let divergent_changes: Vec<(&Commit, Vec<CommitId>)> = target_commits
        .iter()
        .map(|target_commit| -> Result<_, BackendError> {
            let mut ancestor_candidates = repo
                .resolve_change_id(target_commit.change_id())
                .map_err(|err| BackendError::Other(err.into()))?
                .unwrap_or_default();
            ancestor_candidates.retain(|commit_id| !target_commit_ids.contains(commit_id));
            Ok((target_commit, ancestor_candidates))
        })
        .filter_ok(|(_, candidates)| !candidates.is_empty())
        .try_collect()?;
    if divergent_changes.is_empty() {
        return Ok(Vec::new());
    }

    let target_root_ids = match target {
        MoveCommitsTarget::Commits(commit_ids) => commit_ids,
        MoveCommitsTarget::Roots(root_ids) => root_ids,
    };

    // Membership test for the revset range `target_roots..new_parents`,
    // i.e. commits the target will be rebased on top of.
    let is_new_ancestor = RevsetExpression::commits(target_root_ids.clone())
        .range(&RevsetExpression::commits(new_parent_ids.to_owned()))
        .evaluate(repo)
        .map_err(|err| err.into_backend_error())?
        .containing_fn();

    let mut duplicate_divergent = Vec::new();
    for (target_commit, ancestor_candidates) in divergent_changes {
        for ancestor_candidate_id in ancestor_candidates {
            // Only consider candidates the target would end up on top of.
            if !is_new_ancestor(&ancestor_candidate_id).map_err(|err| err.into_backend_error())? {
                continue;
            }

            let ancestor_candidate = repo.store().get_commit(&ancestor_candidate_id)?;
            let new_tree =
                rebase_to_dest_parent(repo, slice::from_ref(target_commit), &ancestor_candidate)?;
            // Identical rebased tree: the change already exists in the
            // destination, so the target is a duplicate.
            if new_tree.id() == *ancestor_candidate.tree_id() {
                duplicate_divergent.push(target_commit.clone());
                break;
            }
        }
    }
    Ok(duplicate_divergent)
}