1#![expect(missing_docs)]
16
17use std::collections::HashMap;
18use std::collections::HashSet;
19use std::slice;
20use std::sync::Arc;
21
22use futures::StreamExt as _;
23use futures::future::try_join_all;
24use futures::try_join;
25use indexmap::IndexMap;
26use indexmap::IndexSet;
27use itertools::Itertools as _;
28use pollster::FutureExt as _;
29use tracing::instrument;
30
31use crate::backend::BackendError;
32use crate::backend::BackendResult;
33use crate::backend::CommitId;
34use crate::commit::Commit;
35use crate::commit::CommitIteratorExt as _;
36use crate::commit_builder::CommitBuilder;
37use crate::index::Index;
38use crate::index::IndexResult;
39use crate::iter_util::fallible_any;
40use crate::matchers::Matcher;
41use crate::matchers::Visit;
42use crate::merge::Merge;
43use crate::merged_tree::MergedTree;
44use crate::merged_tree::MergedTreeBuilder;
45use crate::merged_tree::TreeDiffEntry;
46use crate::repo::MutableRepo;
47use crate::repo::Repo;
48use crate::repo_path::RepoPath;
49use crate::revset::RevsetExpression;
50use crate::revset::RevsetIteratorExt as _;
51use crate::store::Store;
52
53#[instrument(skip(repo))]
55pub async fn merge_commit_trees(repo: &dyn Repo, commits: &[Commit]) -> BackendResult<MergedTree> {
56 if let [commit] = commits {
57 Ok(commit.tree())
58 } else {
59 merge_commit_trees_no_resolve_without_repo(repo.store(), repo.index(), commits)
60 .await?
61 .resolve()
62 .await
63 }
64}
65
66#[instrument(skip(index))]
68pub async fn merge_commit_trees_no_resolve_without_repo(
69 store: &Arc<Store>,
70 index: &dyn Index,
71 commits: &[Commit],
72) -> BackendResult<MergedTree> {
73 let commit_ids = commits
74 .iter()
75 .map(|commit| commit.id().clone())
76 .collect_vec();
77 let commit_id_merge = find_recursive_merge_commits(store, index, commit_ids)?;
78 let tree_id_merge = commit_id_merge
79 .try_map_async(async |commit_id| {
80 let commit = store.get_commit_async(commit_id).await?;
81 Ok::<_, BackendError>(commit.tree_ids().clone())
82 })
83 .await?;
84 Ok(MergedTree::new(
85 store.clone(),
86 tree_id_merge.flatten().simplify(),
87 ))
88}
89
/// Computes the commit-level [`Merge`] to use as the basis for merging the
/// commits' trees.
///
/// Zero commits resolve to the root commit; one commit resolves to itself.
/// For more, each commit is folded in together with the common ancestors
/// (themselves computed recursively) between it and the previously folded
/// commits, and the result is flattened into a single merge.
pub fn find_recursive_merge_commits(
    store: &Arc<Store>,
    index: &dyn Index,
    mut commit_ids: Vec<CommitId>,
) -> BackendResult<Merge<CommitId>> {
    if commit_ids.is_empty() {
        Ok(Merge::resolved(store.root_commit_id().clone()))
    } else if commit_ids.len() == 1 {
        Ok(Merge::resolved(commit_ids.pop().unwrap()))
    } else {
        let mut result = Merge::resolved(commit_ids[0].clone());
        for (i, other_commit_id) in commit_ids.iter().enumerate().skip(1) {
            // Common ancestors of everything folded in so far and the next
            // commit (`&commit_ids[i..][..1]` is the single-element slice at i).
            let ancestor_ids = index
                .common_ancestors(&commit_ids[0..i], &commit_ids[i..][..1])
                .map_err(|err| BackendError::Other(err.into()))?;
            // The ancestors may themselves form a merge; recurse to resolve.
            let ancestor_merge = find_recursive_merge_commits(store, index, ancestor_ids)?;
            result = Merge::from_vec(vec![
                result,
                ancestor_merge,
                Merge::resolved(other_commit_id.clone()),
            ])
            .flatten();
        }
        Ok(result)
    }
}
118
119pub async fn restore_tree(
121 source: &MergedTree,
122 destination: &MergedTree,
123 matcher: &dyn Matcher,
124) -> BackendResult<MergedTree> {
125 if matcher.visit(RepoPath::root()) == Visit::AllRecursively {
126 Ok(source.clone())
128 } else {
129 let mut tree_builder = MergedTreeBuilder::new(destination.clone());
132 let mut diff_stream = source.diff_stream(destination, matcher);
134 while let Some(TreeDiffEntry {
135 path: repo_path,
136 values,
137 }) = diff_stream.next().await
138 {
139 let source_value = values?.before;
140 tree_builder.set_or_remove(repo_path, source_value);
141 }
142 tree_builder.write_tree()
143 }
144}
145
146pub async fn rebase_commit(
147 mut_repo: &mut MutableRepo,
148 old_commit: Commit,
149 new_parents: Vec<CommitId>,
150) -> BackendResult<Commit> {
151 let rewriter = CommitRewriter::new(mut_repo, old_commit, new_parents);
152 let builder = rewriter.rebase().await?;
153 builder.write()
154}
155
/// Helper for rebasing a single commit onto a new set of parents.
pub struct CommitRewriter<'repo> {
    // Repo the rewritten commit will be recorded in.
    mut_repo: &'repo mut MutableRepo,
    // The commit being rewritten.
    old_commit: Commit,
    // Parents the rewritten commit will have; may be adjusted before rebasing.
    new_parents: Vec<CommitId>,
}
162
impl<'repo> CommitRewriter<'repo> {
    /// Creates a rewriter that will rebase `old_commit` onto `new_parents`.
    pub fn new(
        mut_repo: &'repo mut MutableRepo,
        old_commit: Commit,
        new_parents: Vec<CommitId>,
    ) -> Self {
        Self {
            mut_repo,
            old_commit,
            new_parents,
        }
    }

    /// Returns the underlying mutable repo.
    pub fn repo_mut(&mut self) -> &mut MutableRepo {
        self.mut_repo
    }

    /// The commit being rewritten.
    pub fn old_commit(&self) -> &Commit {
        &self.old_commit
    }

    /// The parents the rewritten commit will have.
    pub fn new_parents(&self) -> &[CommitId] {
        &self.new_parents
    }

    /// Replaces the planned parents wholesale.
    pub fn set_new_parents(&mut self, new_parents: Vec<CommitId>) {
        self.new_parents = new_parents;
    }

    /// Sets the planned parents to the rewritten versions of
    /// `unrewritten_parents`, as recorded in the repo.
    pub fn set_new_rewritten_parents(&mut self, unrewritten_parents: &[CommitId]) {
        self.new_parents = self.mut_repo.new_parents(unrewritten_parents);
    }

    /// Substitutes `old_parent` (if it is among the planned parents) with
    /// `new_parents`, deduplicating the result while preserving order.
    pub fn replace_parent<'a>(
        &mut self,
        old_parent: &CommitId,
        new_parents: impl IntoIterator<Item = &'a CommitId>,
    ) {
        if let Some(i) = self.new_parents.iter().position(|p| p == old_parent) {
            self.new_parents
                .splice(i..i + 1, new_parents.into_iter().cloned());
            // Deduplicate in place, keeping the first occurrence of each id.
            let mut unique = HashSet::new();
            self.new_parents.retain(|p| unique.insert(p.clone()));
        }
    }

    /// Whether the planned parents differ from the old commit's parents.
    pub fn parents_changed(&self) -> bool {
        self.new_parents != self.old_commit.parent_ids()
    }

    /// Drops planned parents that are ancestors of other planned parents,
    /// keeping only the heads of the parent set.
    pub fn simplify_ancestor_merge(&mut self) -> IndexResult<()> {
        let head_set: HashSet<_> = self
            .mut_repo
            .index()
            .heads(&mut self.new_parents.iter())?
            .into_iter()
            .collect();
        self.new_parents.retain(|parent| head_set.contains(parent));
        Ok(())
    }

    /// Records the old commit as abandoned, with its children reparented onto
    /// the planned parents.
    pub fn abandon(self) {
        let old_commit_id = self.old_commit.id().clone();
        let new_parents = self.new_parents;
        self.mut_repo
            .record_abandoned_commit_with_parents(old_commit_id, new_parents);
    }

    /// Rebases the old commit onto the planned parents, handling commits that
    /// end up empty according to `empty`.
    ///
    /// Returns `None` if the commit was abandoned instead of rewritten.
    pub async fn rebase_with_empty_behavior(
        self,
        empty: EmptyBehavior,
    ) -> BackendResult<Option<CommitBuilder<'repo>>> {
        // Fetch old and new parent commits concurrently.
        let old_parents_fut = self.old_commit.parents_async();
        let new_parents_fut = try_join_all(
            self.new_parents
                .iter()
                .map(|new_parent_id| self.mut_repo.store().get_commit_async(new_parent_id)),
        );
        let (old_parents, new_parents) = try_join!(old_parents_fut, new_parents_fut)?;
        let old_parent_trees = old_parents
            .iter()
            .map(|parent| parent.tree_ids().clone())
            .collect_vec();
        let new_parent_trees = new_parents
            .iter()
            .map(|parent| parent.tree_ids().clone())
            .collect_vec();

        let (was_empty, new_tree) = if new_parent_trees == old_parent_trees {
            (
                // Parent trees unchanged, so no tree merge is needed. Setting
                // `was_empty` to true makes `AbandonNewlyEmpty` keep the
                // commit (its `!was_empty` check below becomes false).
                true,
                self.old_commit.tree(),
            )
        } else {
            // Three-way merge: apply the old commit's change (old tree vs. its
            // old merged base) onto the new merged base.
            let old_base_tree_fut = merge_commit_trees(self.mut_repo, &old_parents);
            let new_base_tree_fut = merge_commit_trees(self.mut_repo, &new_parents);
            let old_tree = self.old_commit.tree();
            let (old_base_tree, new_base_tree) = try_join!(old_base_tree_fut, new_base_tree_fut)?;
            (
                old_base_tree.tree_ids() == self.old_commit.tree_ids(),
                new_base_tree.merge(old_base_tree, old_tree).await?,
            )
        };
        // Abandoning is only considered for single-parent results.
        if let [parent] = &new_parents[..] {
            let should_abandon = match empty {
                EmptyBehavior::Keep => false,
                EmptyBehavior::AbandonNewlyEmpty => {
                    parent.tree_ids() == new_tree.tree_ids() && !was_empty
                }
                EmptyBehavior::AbandonAllEmpty => parent.tree_ids() == new_tree.tree_ids(),
            };
            if should_abandon {
                self.abandon();
                return Ok(None);
            }
        }

        let builder = self
            .mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
            .set_tree(new_tree);
        Ok(Some(builder))
    }

    /// Rebases the old commit onto the planned parents, keeping the commit
    /// even if it becomes empty.
    pub async fn rebase(self) -> BackendResult<CommitBuilder<'repo>> {
        let builder = self.rebase_with_empty_behavior(EmptyBehavior::Keep).await?;
        // `Keep` never abandons, so the builder is always present.
        Ok(builder.unwrap())
    }

    /// Rewrites the commit with the planned parents but keeps its old tree
    /// (no content rebase).
    pub fn reparent(self) -> CommitBuilder<'repo> {
        self.mut_repo
            .rewrite_commit(&self.old_commit)
            .set_parents(self.new_parents)
    }
}
329
/// Outcome of rebasing a single commit.
#[derive(Debug)]
pub enum RebasedCommit {
    /// The commit was rewritten onto its new parents.
    Rewritten(Commit),
    /// The commit was abandoned (e.g. it became empty); children move onto
    /// this single parent.
    Abandoned { parent_id: CommitId },
}
335
/// Rebases the rewriter's commit with the given options, returning either the
/// rewritten commit or the parent the commit was abandoned onto.
///
/// Synchronous wrapper that blocks on the async rebase.
pub fn rebase_commit_with_options(
    mut rewriter: CommitRewriter<'_>,
    options: &RebaseOptions,
) -> BackendResult<RebasedCommit> {
    if options.simplify_ancestor_merge {
        rewriter
            .simplify_ancestor_merge()
            .map_err(|err| BackendError::Other(err.into()))?;
    }

    // Captured before `rewriter` is consumed; only used in the abandoned
    // case, which only occurs with exactly one new parent.
    let single_parent = match &rewriter.new_parents[..] {
        [parent_id] => Some(parent_id.clone()),
        _ => None,
    };
    let new_parents_len = rewriter.new_parents.len();
    if let Some(builder) = rewriter
        .rebase_with_empty_behavior(options.empty)
        .block_on()?
    {
        let new_commit = builder.write()?;
        Ok(RebasedCommit::Rewritten(new_commit))
    } else {
        // `rebase_with_empty_behavior` only abandons single-parent commits.
        assert_eq!(new_parents_len, 1);
        Ok(RebasedCommit::Abandoned {
            parent_id: single_parent.unwrap(),
        })
    }
}
366
367pub fn rebase_to_dest_parent(
369 repo: &dyn Repo,
370 sources: &[Commit],
371 destination: &Commit,
372) -> BackendResult<MergedTree> {
373 if let [source] = sources
374 && source.parent_ids() == destination.parent_ids()
375 {
376 return Ok(source.tree());
377 }
378 sources.iter().try_fold(
379 destination.parent_tree(repo)?,
380 |destination_tree, source| {
381 let source_parent_tree = source.parent_tree(repo)?;
382 let source_tree = source.tree();
383 destination_tree
384 .merge(source_parent_tree, source_tree)
385 .block_on()
386 },
387 )
388}
389
/// What to do with commits whose rebased tree matches their parent's tree.
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub enum EmptyBehavior {
    /// Always keep the commit, even if it is or becomes empty.
    #[default]
    Keep,
    /// Abandon commits that become empty due to the rebase, but keep commits
    /// that were already empty beforehand.
    AbandonNewlyEmpty,
    /// Abandon any commit that is empty after the rebase, regardless of
    /// whether it was empty before.
    AbandonAllEmpty,
}
404
/// Options controlling how commits are rebased.
#[derive(Clone, Debug, Default)]
pub struct RebaseOptions {
    /// How commits that are (or become) empty are handled.
    pub empty: EmptyBehavior,
    /// How refs pointing at rewritten or abandoned commits are updated.
    pub rewrite_refs: RewriteRefsOptions,
    /// If true, new parents that are ancestors of other new parents are
    /// dropped before rebasing.
    pub simplify_ancestor_merge: bool,
}
419
/// Options for how refs are updated when commits are rewritten.
#[derive(Clone, Debug, Default)]
pub struct RewriteRefsOptions {
    /// Whether bookmarks pointing to an abandoned commit should be deleted
    /// (presumably instead of being moved to the abandoned commit's parents —
    /// confirm against the repo's ref-rewriting logic).
    pub delete_abandoned_bookmarks: bool,
}
429
/// Statistics collected while moving commits.
#[derive(Debug)]
pub struct MoveCommitsStats {
    /// Number of target commits that were rebased.
    pub num_rebased_targets: u32,
    /// Number of non-target descendant commits that were rebased.
    pub num_rebased_descendants: u32,
    /// Number of commits whose parents were unchanged, so no rewrite was
    /// needed.
    pub num_skipped_rebases: u32,
    /// Number of commits abandoned during the rebase (empty-commit handling).
    pub num_abandoned_empty: u32,
    /// Map from each rewritten commit's old id to the rebase outcome.
    pub rebased_commits: HashMap<CommitId, RebasedCommit>,
}
444
/// Describes where a set of commits should be moved to.
#[derive(Clone, Debug)]
pub struct MoveCommitsLocation {
    /// Parents the moved commits will be placed onto.
    pub new_parent_ids: Vec<CommitId>,
    /// Commits that should become children of the moved commits' heads.
    pub new_child_ids: Vec<CommitId>,
    /// The set of commits to move.
    pub target: MoveCommitsTarget,
}
452
/// Which commits a move operation applies to.
#[derive(Clone, Debug)]
pub enum MoveCommitsTarget {
    /// Move exactly these commits; descendants outside the set are rebased
    /// out of the way rather than moved.
    Commits(Vec<CommitId>),
    /// Move these commits and all of their descendants.
    Roots(Vec<CommitId>),
}
461
/// A precomputed plan for moving commits, produced by [`compute_move_commits`]
/// and executed via [`ComputedMoveCommits::apply`].
#[derive(Clone, Debug)]
pub struct ComputedMoveCommits {
    // Ids of the commits being moved.
    target_commit_ids: IndexSet<CommitId>,
    // Commits that may need rewriting (targets plus affected descendants).
    descendants: Vec<Commit>,
    // New parent ids for each commit to be rewritten.
    commit_new_parents_map: HashMap<CommitId, Vec<CommitId>>,
    // Commits to abandon instead of rebasing when the plan is applied.
    to_abandon: HashSet<CommitId>,
}
469
470impl ComputedMoveCommits {
471 fn empty() -> Self {
472 Self {
473 target_commit_ids: IndexSet::new(),
474 descendants: vec![],
475 commit_new_parents_map: HashMap::new(),
476 to_abandon: HashSet::new(),
477 }
478 }
479
480 pub fn record_to_abandon(&mut self, commit_ids: impl IntoIterator<Item = CommitId>) {
487 self.to_abandon.extend(commit_ids);
488 }
489
490 pub fn apply(
491 self,
492 mut_repo: &mut MutableRepo,
493 options: &RebaseOptions,
494 ) -> BackendResult<MoveCommitsStats> {
495 apply_move_commits(mut_repo, self, options)
496 }
497}
498
499pub fn move_commits(
508 mut_repo: &mut MutableRepo,
509 loc: &MoveCommitsLocation,
510 options: &RebaseOptions,
511) -> BackendResult<MoveCommitsStats> {
512 compute_move_commits(mut_repo, loc)?.apply(mut_repo, options)
513}
514
/// Computes the rebase plan for moving commits according to `loc`, without
/// mutating the repo.
pub fn compute_move_commits(
    repo: &MutableRepo,
    loc: &MoveCommitsLocation,
) -> BackendResult<ComputedMoveCommits> {
    let target_commit_ids: IndexSet<CommitId>;
    let connected_target_commits: Vec<Commit>;
    let connected_target_commits_internal_parents: HashMap<CommitId, IndexSet<CommitId>>;
    let target_roots: HashSet<CommitId>;

    match &loc.target {
        MoveCommitsTarget::Commits(commit_ids) => {
            if commit_ids.is_empty() {
                return Ok(ComputedMoveCommits::empty());
            }

            target_commit_ids = commit_ids.iter().cloned().collect();

            // `connected()` also pulls in commits between the targets so gaps
            // in the target set can be bridged when remapping parents.
            connected_target_commits = RevsetExpression::commits(commit_ids.clone())
                .connected()
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .iter()
                .commits(repo.store())
                .try_collect()
                .map_err(|err| err.into_backend_error())?;
            connected_target_commits_internal_parents =
                compute_internal_parents_within(&target_commit_ids, &connected_target_commits);

            // Targets with no (bridged) parents inside the target set are the
            // roots to be reparented onto the new parents.
            target_roots = connected_target_commits_internal_parents
                .iter()
                .filter(|&(commit_id, parents)| {
                    target_commit_ids.contains(commit_id) && parents.is_empty()
                })
                .map(|(commit_id, _)| commit_id.clone())
                .collect();
        }
        MoveCommitsTarget::Roots(root_ids) => {
            if root_ids.is_empty() {
                return Ok(ComputedMoveCommits::empty());
            }

            // All descendants of the roots move together.
            target_commit_ids = RevsetExpression::commits(root_ids.clone())
                .descendants()
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .iter()
                .try_collect()
                .map_err(|err| err.into_backend_error())?;

            connected_target_commits = target_commit_ids
                .iter()
                .map(|id| repo.store().get_commit(id))
                .try_collect()?;
            // A descendant set is already connected; no bridging needed.
            connected_target_commits_internal_parents = HashMap::new();
            target_roots = root_ids.iter().cloned().collect();
        }
    }

    // For each target commit, its closest ancestors *outside* the target set.
    // Iterated in reverse (ancestors first — TODO confirm the revset order
    // assumption) so a parent's entry exists before its children look it up.
    let mut target_commits_external_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
    for id in target_commit_ids.iter().rev() {
        let commit = repo.store().get_commit(id)?;
        let mut new_parents = IndexSet::new();
        for old_parent in commit.parent_ids() {
            if let Some(parents) = target_commits_external_parents.get(old_parent) {
                // Parent is itself a target: substitute its external parents.
                new_parents.extend(parents.iter().cloned());
            } else {
                new_parents.insert(old_parent.clone());
            }
        }
        target_commits_external_parents.insert(commit.id().clone(), new_parents);
    }

    // If a requested new parent is itself a target, replace it with that
    // target's external parents (a moved commit cannot be its own parent).
    let new_parent_ids: Vec<_> = loc
        .new_parent_ids
        .iter()
        .flat_map(|parent_id| {
            if let Some(parent_ids) = target_commits_external_parents.get(parent_id) {
                parent_ids.iter().cloned().collect_vec()
            } else {
                vec![parent_id.clone()]
            }
        })
        .collect();

    // Compute the effective new children: if a requested child is itself a
    // target, its closest non-target descendants become children instead.
    let new_children: Vec<_> = if loc
        .new_child_ids
        .iter()
        .any(|id| target_commit_ids.contains(id))
    {
        // Targets plus their direct children, used to walk child edges.
        let target_commits_descendants: Vec<_> =
            RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
                .union(
                    &RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
                        .children(),
                )
                .evaluate(repo)
                .map_err(|err| err.into_backend_error())?
                .iter()
                .commits(repo.store())
                .try_collect()
                .map_err(|err| err.into_backend_error())?;

        // For each target, its closest descendants outside the target set.
        let mut target_commit_external_descendants: HashMap<CommitId, IndexSet<Commit>> =
            HashMap::new();
        for commit in &target_commits_descendants {
            if !target_commit_external_descendants.contains_key(commit.id()) {
                // A non-target commit is its own "external descendant".
                let children = if target_commit_ids.contains(commit.id()) {
                    IndexSet::new()
                } else {
                    IndexSet::from([commit.clone()])
                };
                target_commit_external_descendants.insert(commit.id().clone(), children);
            }

            // Propagate this commit's external descendants up to any target
            // parents.
            let children = target_commit_external_descendants
                .get(commit.id())
                .unwrap()
                .iter()
                .cloned()
                .collect_vec();
            for parent_id in commit.parent_ids() {
                if target_commit_ids.contains(parent_id) {
                    if let Some(target_children) =
                        target_commit_external_descendants.get_mut(parent_id)
                    {
                        target_children.extend(children.iter().cloned());
                    } else {
                        target_commit_external_descendants
                            .insert(parent_id.clone(), children.iter().cloned().collect());
                    }
                };
            }
        }

        let mut new_children = Vec::new();
        for id in &loc.new_child_ids {
            if let Some(children) = target_commit_external_descendants.get(id) {
                new_children.extend(children.iter().cloned());
            } else {
                new_children.push(repo.store().get_commit(id)?);
            }
        }
        new_children
    } else {
        loc.new_child_ids
            .iter()
            .map(|id| repo.store().get_commit(id))
            .try_collect()?
    };

    // Parents for each new child: its old parents, with parents that are
    // being displaced replaced by the heads of the moved target set.
    let new_children_parents: HashMap<_, _> = if !new_children.is_empty() {
        // Heads of the target set, which the new children will sit on top of.
        let target_heads = compute_commits_heads(&target_commit_ids, &connected_target_commits);

        new_children
            .iter()
            .map(|child_commit| {
                let mut new_child_parent_ids = IndexSet::new();
                for old_child_parent_id in child_commit.parent_ids() {
                    // Map parents that are targets to their external parents.
                    let old_child_parent_ids = if let Some(parents) =
                        target_commits_external_parents.get(old_child_parent_id)
                    {
                        parents.iter().collect_vec()
                    } else {
                        vec![old_child_parent_id]
                    };

                    for id in old_child_parent_ids {
                        // A parent that is also one of the new parents is
                        // replaced by the target heads to avoid a redundant
                        // edge around the moved commits.
                        if new_parent_ids.contains(id) {
                            new_child_parent_ids.extend(target_heads.clone());
                        } else {
                            new_child_parent_ids.insert(id.clone());
                        };
                    }
                }

                // Ensure the child ends up on top of the moved commits.
                new_child_parent_ids.extend(target_heads.clone());

                (
                    child_commit.id().clone(),
                    new_child_parent_ids.into_iter().collect_vec(),
                )
            })
            .collect()
    } else {
        HashMap::new()
    };

    // Everything reachable from the target roots or the new children may need
    // rewriting.
    let mut roots = target_roots.iter().cloned().collect_vec();
    roots.extend(new_children.iter().ids().cloned());

    let descendants = repo.find_descendants_for_rebase(roots.clone())?;
    let commit_new_parents_map = descendants
        .iter()
        .map(|commit| -> BackendResult<_> {
            let commit_id = commit.id();
            let new_parent_ids =
                if let Some(new_child_parents) = new_children_parents.get(commit_id) {
                    new_child_parents.clone()
                } else if target_commit_ids.contains(commit_id) {
                    // Target roots go onto the requested new parents; other
                    // targets keep parents within (or bridged into) the set.
                    if target_roots.contains(commit_id) {
                        new_parent_ids.clone()
                    } else {
                        let mut new_parents = vec![];
                        for parent_id in commit.parent_ids() {
                            if target_commit_ids.contains(parent_id) {
                                new_parents.push(parent_id.clone());
                            } else if let Some(parents) =
                                connected_target_commits_internal_parents.get(parent_id)
                            {
                                // Bridge across non-target connector commits.
                                new_parents.extend(parents.iter().cloned());
                            } else if !fallible_any(&new_children, |child| {
                                repo.index().is_ancestor(child.id(), parent_id)
                            })
                            .map_err(|err| BackendError::Other(err.into()))?
                            {
                                // Keep external parents unless that would form
                                // a cycle through one of the new children.
                                new_parents.push(parent_id.clone());
                            }
                        }
                        new_parents
                    }
                } else if commit
                    .parent_ids()
                    .iter()
                    .any(|id| target_commits_external_parents.contains_key(id))
                {
                    // A descendant left behind by the move: reparent it onto
                    // the targets' external parents.
                    let mut new_parents = vec![];
                    for parent in commit.parent_ids() {
                        if let Some(parents) = target_commits_external_parents.get(parent) {
                            new_parents.extend(parents.iter().cloned());
                        } else {
                            new_parents.push(parent.clone());
                        }
                    }
                    new_parents
                } else {
                    commit.parent_ids().iter().cloned().collect_vec()
                };
            Ok((commit.id().clone(), new_parent_ids))
        })
        .try_collect()?;

    Ok(ComputedMoveCommits {
        target_commit_ids,
        descendants,
        commit_new_parents_map,
        to_abandon: HashSet::new(),
    })
}
804
/// Executes a computed move plan: abandons or rebases each affected commit and
/// collects statistics.
fn apply_move_commits(
    mut_repo: &mut MutableRepo,
    commits: ComputedMoveCommits,
    options: &RebaseOptions,
) -> BackendResult<MoveCommitsStats> {
    let mut num_rebased_targets = 0;
    let mut num_rebased_descendants = 0;
    let mut num_skipped_rebases = 0;
    let mut num_abandoned_empty = 0;

    // The caller's empty-commit behavior applies only to target commits;
    // descendants are always kept.
    let rebase_descendant_options = &RebaseOptions {
        empty: EmptyBehavior::Keep,
        rewrite_refs: options.rewrite_refs.clone(),
        simplify_ancestor_merge: options.simplify_ancestor_merge,
    };

    let mut rebased_commits: HashMap<CommitId, RebasedCommit> = HashMap::new();
    mut_repo.transform_commits(
        commits.descendants,
        &commits.commit_new_parents_map,
        &options.rewrite_refs,
        async |rewriter| {
            let old_commit_id = rewriter.old_commit().id().clone();
            if commits.to_abandon.contains(&old_commit_id) {
                rewriter.abandon();
            } else if rewriter.parents_changed() {
                let is_target_commit = commits.target_commit_ids.contains(&old_commit_id);
                let rebased_commit = rebase_commit_with_options(
                    rewriter,
                    if is_target_commit {
                        options
                    } else {
                        rebase_descendant_options
                    },
                )?;
                if let RebasedCommit::Abandoned { .. } = rebased_commit {
                    num_abandoned_empty += 1;
                } else if is_target_commit {
                    num_rebased_targets += 1;
                } else {
                    num_rebased_descendants += 1;
                }
                rebased_commits.insert(old_commit_id, rebased_commit);
            } else {
                // Parents unchanged: nothing to rewrite.
                num_skipped_rebases += 1;
            }

            Ok(())
        },
    )?;

    Ok(MoveCommitsStats {
        num_rebased_targets,
        num_rebased_descendants,
        num_skipped_rebases,
        num_abandoned_empty,
        rebased_commits,
    })
}
865
/// Statistics from a commit-duplication operation.
#[derive(Debug, Default)]
pub struct DuplicateCommitsStats {
    /// Map from each original commit id to its newly written duplicate.
    pub duplicated_commits: IndexMap<CommitId, Commit>,
    /// Number of commits rebased as a result of inserting the duplicates.
    pub num_rebased: u32,
}
874
/// Duplicates `target_commit_ids` onto `parent_commit_ids`, inserting the
/// duplicates between those parents and `children_commit_ids`.
///
/// Targets are duplicated ancestors-first so later duplicates can reference
/// earlier ones as parents. Entries in `target_descriptions` override the
/// duplicated commits' descriptions. The children (and their descendants) are
/// then rebased onto the duplicated heads.
pub async fn duplicate_commits(
    mut_repo: &mut MutableRepo,
    target_commit_ids: &[CommitId],
    target_descriptions: &HashMap<CommitId, String>,
    parent_commit_ids: &[CommitId],
    children_commit_ids: &[CommitId],
) -> BackendResult<DuplicateCommitsStats> {
    if target_commit_ids.is_empty() {
        return Ok(DuplicateCommitsStats::default());
    }

    let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();
    let mut num_rebased = 0;

    let target_commit_ids: IndexSet<_> = target_commit_ids.iter().cloned().collect();

    // Targets plus any commits between them, used to compute the internal
    // parent/head structure of the duplicated set.
    let connected_target_commits: Vec<_> =
        RevsetExpression::commits(target_commit_ids.iter().cloned().collect_vec())
            .connected()
            .evaluate(mut_repo)
            .map_err(|err| err.into_backend_error())?
            .iter()
            .commits(mut_repo.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;

    // For each target, its closest ancestors within the target set (bridging
    // over non-target connector commits); non-target keys are dropped.
    let target_commits_internal_parents = {
        let mut target_commits_internal_parents =
            compute_internal_parents_within(&target_commit_ids, &connected_target_commits);
        target_commits_internal_parents.retain(|id, _| target_commit_ids.contains(id));
        target_commits_internal_parents
    };

    // Targets with no internal parents: their duplicates get
    // `parent_commit_ids` as parents.
    let target_root_ids: HashSet<_> = target_commits_internal_parents
        .iter()
        .filter(|(_, parents)| parents.is_empty())
        .map(|(commit_id, _)| commit_id.clone())
        .collect();

    // Heads of the duplicated set; only needed if children must be rebased.
    let target_head_ids = if !children_commit_ids.is_empty() {
        compute_commits_heads(&target_commit_ids, &connected_target_commits)
    } else {
        vec![]
    };

    // Duplicate ancestors before descendants so parent ids can be remapped to
    // the already-written duplicates.
    for original_commit_id in target_commit_ids.iter().rev() {
        let original_commit = mut_repo
            .store()
            .get_commit_async(original_commit_id)
            .await?;
        let new_parent_ids = if target_root_ids.contains(original_commit_id) {
            parent_commit_ids.to_vec()
        } else {
            // Internal parents, remapped to their duplicates where available.
            target_commits_internal_parents
                .get(original_commit_id)
                .unwrap()
                .iter()
                .map(|id| {
                    duplicated_old_to_new
                        .get(id)
                        .map_or(id, |commit| commit.id())
                        .clone()
                })
                .collect()
        };
        let mut new_commit_builder = CommitRewriter::new(mut_repo, original_commit, new_parent_ids)
            .rebase()
            .await?
            .clear_rewrite_source()
            .generate_new_change_id();
        if let Some(desc) = target_descriptions.get(original_commit_id) {
            new_commit_builder = new_commit_builder.set_description(desc);
        }
        duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?);
    }

    // Translate the head ids into the ids of their duplicates.
    let target_head_ids = target_head_ids
        .into_iter()
        .map(|commit_id| {
            duplicated_old_to_new
                .get(&commit_id)
                .map_or(commit_id, |commit| commit.id().clone())
        })
        .collect_vec();

    // Rebase the requested children (and all their descendants) onto the
    // duplicated heads.
    let children_commit_ids_set: HashSet<CommitId> = children_commit_ids.iter().cloned().collect();
    mut_repo.transform_descendants(children_commit_ids.to_vec(), async |mut rewriter| {
        if children_commit_ids_set.contains(rewriter.old_commit().id()) {
            let mut child_new_parent_ids = IndexSet::new();
            for old_parent_id in rewriter.old_commit().parent_ids() {
                if parent_commit_ids.contains(old_parent_id) {
                    // Parents the duplicates were inserted onto are replaced
                    // by the duplicated heads.
                    child_new_parent_ids.extend(target_head_ids.clone());
                } else {
                    child_new_parent_ids.insert(old_parent_id.clone());
                }
            }
            // Ensure the child sits on top of the duplicates even if none of
            // its parents were replaced above.
            child_new_parent_ids.extend(target_head_ids.clone());
            rewriter.set_new_parents(child_new_parent_ids.into_iter().collect());
        }
        num_rebased += 1;
        rewriter.rebase().await?.write()?;
        Ok(())
    })?;

    Ok(DuplicateCommitsStats {
        duplicated_commits: duplicated_old_to_new,
        num_rebased,
    })
}
1022
1023pub fn duplicate_commits_onto_parents(
1033 mut_repo: &mut MutableRepo,
1034 target_commits: &[CommitId],
1035 target_descriptions: &HashMap<CommitId, String>,
1036) -> BackendResult<DuplicateCommitsStats> {
1037 if target_commits.is_empty() {
1038 return Ok(DuplicateCommitsStats::default());
1039 }
1040
1041 let mut duplicated_old_to_new: IndexMap<CommitId, Commit> = IndexMap::new();
1042
1043 for original_commit_id in target_commits.iter().rev() {
1046 let original_commit = mut_repo.store().get_commit(original_commit_id)?;
1047 let new_parent_ids = original_commit
1048 .parent_ids()
1049 .iter()
1050 .map(|id| {
1051 duplicated_old_to_new
1052 .get(id)
1053 .map_or(id, |commit| commit.id())
1054 .clone()
1055 })
1056 .collect();
1057 let mut new_commit_builder = mut_repo
1058 .rewrite_commit(&original_commit)
1059 .clear_rewrite_source()
1060 .generate_new_change_id()
1061 .set_parents(new_parent_ids);
1062 if let Some(desc) = target_descriptions.get(original_commit_id) {
1063 new_commit_builder = new_commit_builder.set_description(desc);
1064 }
1065 duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?);
1066 }
1067
1068 Ok(DuplicateCommitsStats {
1069 duplicated_commits: duplicated_old_to_new,
1070 num_rebased: 0,
1071 })
1072}
1073
1074fn compute_internal_parents_within(
1082 target_commit_ids: &IndexSet<CommitId>,
1083 graph_commits: &[Commit],
1084) -> HashMap<CommitId, IndexSet<CommitId>> {
1085 let mut internal_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new();
1086 for commit in graph_commits.iter().rev() {
1087 let mut new_parents = IndexSet::new();
1090 for old_parent in commit.parent_ids() {
1091 if target_commit_ids.contains(old_parent) {
1092 new_parents.insert(old_parent.clone());
1093 } else if let Some(parents) = internal_parents.get(old_parent) {
1094 new_parents.extend(parents.iter().cloned());
1095 }
1096 }
1097 internal_parents.insert(commit.id().clone(), new_parents);
1098 }
1099 internal_parents
1100}
1101
1102fn compute_commits_heads(
1108 target_commit_ids: &IndexSet<CommitId>,
1109 connected_target_commits: &[Commit],
1110) -> Vec<CommitId> {
1111 let mut target_head_ids: HashSet<CommitId> = HashSet::new();
1112 for commit in connected_target_commits.iter().rev() {
1113 target_head_ids.insert(commit.id().clone());
1114 for old_parent in commit.parent_ids() {
1115 target_head_ids.remove(old_parent);
1116 }
1117 }
1118 connected_target_commits
1119 .iter()
1120 .rev()
1121 .filter(|commit| {
1122 target_head_ids.contains(commit.id()) && target_commit_ids.contains(commit.id())
1123 })
1124 .map(|commit| commit.id().clone())
1125 .collect_vec()
1126}
1127
/// A commit together with the subset of its changes selected for an operation
/// (e.g. squashing), expressed as trees.
#[derive(Debug)]
pub struct CommitWithSelection {
    /// The commit the selection is taken from.
    pub commit: Commit,
    /// The commit's tree restricted to the selected changes.
    pub selected_tree: MergedTree,
    /// The tree of the commit's parent(s), the baseline for the selection.
    pub parent_tree: MergedTree,
}
1134
impl CommitWithSelection {
    /// Returns true if the selection covers every change in the commit.
    pub fn is_full_selection(&self) -> bool {
        self.selected_tree.tree_ids() == self.commit.tree_ids()
    }

    /// Returns true if the selection contains no changes (the selected tree
    /// equals the parent tree).
    ///
    /// Note: both this and `is_full_selection()` can only be true at once
    /// when the commit itself is empty.
    pub fn is_empty_selection(&self) -> bool {
        self.selected_tree.tree_ids() == self.parent_tree.tree_ids()
    }
}
1150
/// Result of squashing commits into a destination, before it is written.
#[must_use]
pub struct SquashedCommit<'repo> {
    /// Builder for the rewritten destination commit (not yet written).
    pub commit_builder: CommitBuilder<'repo>,
    /// Source commits that were abandoned by the squash.
    pub abandoned_commits: Vec<Commit>,
}
1159
/// Squashes the selected changes from `sources` into `destination`, returning
/// a builder for the rewritten destination, or `None` if there was nothing to
/// squash.
///
/// Fully selected sources are abandoned (unless `keep_emptied`); partially
/// selected ones are rewritten with the selection removed. If the destination
/// descends from a source, it is rebased first so its rewritten version is
/// used.
pub fn squash_commits<'repo>(
    repo: &'repo mut MutableRepo,
    sources: &[CommitWithSelection],
    destination: &Commit,
    keep_emptied: bool,
) -> BackendResult<Option<SquashedCommit<'repo>>> {
    // A source paired with whether it will be abandoned outright.
    struct SourceCommit<'a> {
        commit: &'a CommitWithSelection,
        abandon: bool,
    }
    let mut source_commits = vec![];
    for source in sources {
        let abandon = !keep_emptied && source.is_full_selection();
        if !abandon && source.is_empty_selection() {
            // Nothing selected from this source; leave it untouched.
            continue;
        }

        source_commits.push(SourceCommit {
            commit: source,
            abandon,
        });
    }

    if source_commits.is_empty() {
        return Ok(None);
    }

    let mut abandoned_commits = vec![];
    for source in &source_commits {
        if source.abandon {
            repo.record_abandoned_commit(&source.commit.commit);
            abandoned_commits.push(source.commit.commit.clone());
        } else {
            // Back out the selected changes from the source: three-way merge
            // with the selection as base and the parent tree as the other side.
            let source_tree = source.commit.commit.tree();
            let new_source_tree = source_tree
                .merge(
                    source.commit.selected_tree.clone(),
                    source.commit.parent_tree.clone(),
                )
                .block_on()?;
            repo.rewrite_commit(&source.commit.commit)
                .set_tree(new_source_tree)
                .write()?;
        }
    }

    // If the destination descends from any source, rebase descendants now and
    // capture the rewritten destination before touching its tree.
    let mut rewritten_destination = destination.clone();
    if fallible_any(sources, |source| {
        repo.index()
            .is_ancestor(source.commit.id(), destination.id())
    })
    .map_err(|err| BackendError::Other(err.into()))?
    {
        let options = RebaseOptions::default();
        repo.rebase_descendants_with_options(&options, |old_commit, rebased_commit| {
            if old_commit.id() != destination.id() {
                return;
            }
            rewritten_destination = match rebased_commit {
                RebasedCommit::Rewritten(commit) => commit,
                // Default options use EmptyBehavior::Keep, so nothing is
                // abandoned here.
                RebasedCommit::Abandoned { .. } => panic!("all commits should be kept"),
            };
        })?;
    }
    // Fold each source's selected changes into the destination tree.
    let mut destination_tree = rewritten_destination.tree();
    for source in &source_commits {
        destination_tree = destination_tree
            .merge(
                source.commit.parent_tree.clone(),
                source.commit.selected_tree.clone(),
            )
            .block_on()?;
    }
    // Record the old destination and all sources as predecessors.
    let mut predecessors = vec![destination.id().clone()];
    predecessors.extend(
        source_commits
            .iter()
            .map(|source| source.commit.commit.id().clone()),
    );

    let commit_builder = repo
        .rewrite_commit(&rewritten_destination)
        .set_tree(destination_tree)
        .set_predecessors(predecessors);
    Ok(Some(SquashedCommit {
        commit_builder,
        abandoned_commits,
    }))
}
1264
/// Finds target commits that would become duplicates of divergent commits
/// already present in the new ancestry.
///
/// For each target whose change id resolves to other (divergent) commits
/// outside the target set, checks whether one of those commits lies in the
/// new ancestry and has the same tree content as the target would after a
/// rebase next to it; such targets are returned.
pub fn find_duplicate_divergent_commits(
    repo: &dyn Repo,
    new_parent_ids: &[CommitId],
    target: &MoveCommitsTarget,
) -> BackendResult<Vec<Commit>> {
    let target_commits: Vec<Commit> = match target {
        MoveCommitsTarget::Commits(commit_ids) => commit_ids
            .iter()
            .map(|commit_id| repo.store().get_commit(commit_id))
            .try_collect()?,
        MoveCommitsTarget::Roots(root_ids) => RevsetExpression::commits(root_ids.clone())
            .descendants()
            .evaluate(repo)
            .map_err(|err| err.into_backend_error())?
            .iter()
            .commits(repo.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?,
    };
    let target_commit_ids: HashSet<&CommitId> = target_commits.iter().map(Commit::id).collect();

    // Pair each target with the other commits sharing its change id.
    let divergent_changes: Vec<(&Commit, Vec<CommitId>)> = target_commits
        .iter()
        .map(|target_commit| -> Result<_, BackendError> {
            let mut ancestor_candidates = repo
                .resolve_change_id(target_commit.change_id())
                .map_err(|err| BackendError::Other(err.into()))?
                .unwrap_or_default();
            // Commits being moved cannot be pre-existing duplicates.
            ancestor_candidates.retain(|commit_id| !target_commit_ids.contains(commit_id));
            Ok((target_commit, ancestor_candidates))
        })
        .filter_ok(|(_, candidates)| !candidates.is_empty())
        .try_collect()?;
    if divergent_changes.is_empty() {
        return Ok(Vec::new());
    }

    let target_root_ids = match target {
        MoveCommitsTarget::Commits(commit_ids) => commit_ids,
        MoveCommitsTarget::Roots(root_ids) => root_ids,
    };

    // Membership test for the revset `target_roots..new_parents`: ancestors
    // of the new parents that are not ancestors of the target roots.
    let is_new_ancestor = RevsetExpression::commits(target_root_ids.clone())
        .range(&RevsetExpression::commits(new_parent_ids.to_owned()))
        .evaluate(repo)
        .map_err(|err| err.into_backend_error())?
        .containing_fn();

    let mut duplicate_divergent = Vec::new();
    for (target_commit, ancestor_candidates) in divergent_changes {
        for ancestor_candidate_id in ancestor_candidates {
            // Only candidates in the new ancestry can make the target a
            // duplicate after the move.
            if !is_new_ancestor(&ancestor_candidate_id).map_err(|err| err.into_backend_error())? {
                continue;
            }

            // Compare tree content as if the target were rebased next to the
            // candidate; identical trees mean the target is a duplicate.
            let ancestor_candidate = repo.store().get_commit(&ancestor_candidate_id)?;
            let new_tree =
                rebase_to_dest_parent(repo, slice::from_ref(target_commit), &ancestor_candidate)?;
            if new_tree.tree_ids() == ancestor_candidate.tree_ids() {
                duplicate_divergent.push(target_commit.clone());
                break;
            }
        }
    }
    Ok(duplicate_divergent)
}