#![expect(missing_docs)]

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::hash_map::Entry;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fs;
use std::path::Path;
use std::slice;
use std::sync::Arc;

use futures::future::try_join_all;
use itertools::Itertools as _;
use once_cell::sync::OnceCell;
use pollster::FutureExt as _;
use thiserror::Error;
use tracing::instrument;

use self::dirty_cell::DirtyCell;
use crate::backend::Backend;
use crate::backend::BackendError;
use crate::backend::BackendInitError;
use crate::backend::BackendLoadError;
use crate::backend::BackendResult;
use crate::backend::ChangeId;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::commit::CommitByCommitterTimestamp;
use crate::commit_builder::CommitBuilder;
use crate::commit_builder::DetachedCommitBuilder;
use crate::dag_walk;
use crate::default_index::DefaultIndexStore;
use crate::default_index::DefaultMutableIndex;
use crate::default_submodule_store::DefaultSubmoduleStore;
use crate::file_util::IoResultExt as _;
use crate::file_util::PathError;
use crate::index::ChangeIdIndex;
use crate::index::Index;
use crate::index::IndexError;
use crate::index::IndexResult;
use crate::index::IndexStore;
use crate::index::IndexStoreError;
use crate::index::MutableIndex;
use crate::index::ReadonlyIndex;
use crate::index::ResolvedChangeTargets;
use crate::merge::MergeBuilder;
use crate::merge::SameChange;
use crate::merge::trivial_merge;
use crate::merged_tree::MergedTree;
use crate::object_id::HexPrefix;
use crate::object_id::PrefixResolution;
use crate::op_heads_store;
use crate::op_heads_store::OpHeadResolutionError;
use crate::op_heads_store::OpHeadsStore;
use crate::op_heads_store::OpHeadsStoreError;
use crate::op_store;
use crate::op_store::OpStore;
use crate::op_store::OpStoreError;
use crate::op_store::OpStoreResult;
use crate::op_store::OperationId;
use crate::op_store::RefTarget;
use crate::op_store::RemoteRef;
use crate::op_store::RemoteRefState;
use crate::op_store::RootOperationData;
use crate::operation::Operation;
use crate::ref_name::GitRefName;
use crate::ref_name::RefName;
use crate::ref_name::RemoteName;
use crate::ref_name::RemoteRefSymbol;
use crate::ref_name::WorkspaceName;
use crate::ref_name::WorkspaceNameBuf;
use crate::refs::diff_named_commit_ids;
use crate::refs::diff_named_ref_targets;
use crate::refs::diff_named_remote_refs;
use crate::refs::merge_ref_targets;
use crate::refs::merge_remote_refs;
use crate::revset;
use crate::revset::RevsetEvaluationError;
use crate::revset::RevsetExpression;
use crate::revset::RevsetIteratorExt as _;
use crate::rewrite::CommitRewriter;
use crate::rewrite::RebaseOptions;
use crate::rewrite::RebasedCommit;
use crate::rewrite::RewriteRefsOptions;
use crate::rewrite::merge_commit_trees;
use crate::rewrite::rebase_commit_with_options;
use crate::settings::UserSettings;
use crate::signing::SignInitError;
use crate::signing::Signer;
use crate::simple_backend::SimpleBackend;
use crate::simple_op_heads_store::SimpleOpHeadsStore;
use crate::simple_op_store::SimpleOpStore;
use crate::store::Store;
use crate::submodule_store::SubmoduleStore;
use crate::transaction::Transaction;
use crate::transaction::TransactionCommitError;
use crate::tree_merge::MergeOptions;
use crate::view::RenameWorkspaceError;
use crate::view::View;

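/// Read access to a repository: its stores, index, and view. Implemented by
/// both [`ReadonlyRepo`] and [`MutableRepo`].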
pub trait Repo {
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

    fn resolve_change_id(
        &self,
        change_id: &ChangeId,
    ) -> IndexResult<Option<ResolvedChangeTargets>> {
        let prefix = HexPrefix::from_id(change_id);
        match self.resolve_change_id_prefix(&prefix)? {
            PrefixResolution::NoMatch => Ok(None),
            PrefixResolution::SingleMatch(entries) => Ok(Some(entries)),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>>;

    fn shortest_unique_change_id_prefix_len(
        &self,
        target_id_bytes: &ChangeId,
    ) -> IndexResult<usize>;
}

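/// An immutable snapshot of a repository at a particular operation, holding
/// the view and index that correspond to that operation.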
pub struct ReadonlyRepo {
    loader: RepoLoader,
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}

impl Debug for ReadonlyRepo {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_struct("ReadonlyRepo")
            .field("store", &self.loader.store)
            .finish_non_exhaustive()
    }
}

#[derive(Error, Debug)]
pub enum RepoInitError {
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    #[error(transparent)]
    Path(#[from] PathError),
}

impl ReadonlyRepo {
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }

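    /// Initializes a new repository at `repo_path`, creating the `store`,
    /// `op_store`, `op_heads`, `index`, and `submodule_store` directories and
    /// recording each chosen backend's name in a `type` file inside it.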
    #[expect(clippy::too_many_arguments)]
    pub async fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer<'_>,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer<'_>,
        op_heads_store_initializer: &OpHeadsStoreInitializer<'_>,
        index_store_initializer: &IndexStoreInitializer<'_>,
        submodule_store_initializer: &SubmoduleStoreInitializer<'_>,
    ) -> Result<Arc<Self>, RepoInitError> {
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendInitError(err.into()))?;
        let store = Store::new(backend, signer, merge_options);

        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store
            .update_op_heads(&[], op_store.root_operation_id())
            .await?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        let root_operation = loader.root_operation().await;
        let root_view = root_operation
            .view()
            .await
            .expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            .await
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(Self {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

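    /// Starts a new transaction on top of this repo. A minimal sketch of the
    /// typical flow, mirroring how `RepoLoader::merge_operations()` below
    /// uses it:
    ///
    /// ```ignore
    /// let mut tx = repo.start_transaction();
    /// // ... mutate tx.repo_mut() ...
    /// let repo = tx.write("description").await?.leave_unpublished();
    /// ```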
    pub fn start_transaction(self: &Arc<Self>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    pub async fn reload_at_head(&self) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at_head().await
    }

    #[instrument]
    pub async fn reload_at(&self, operation: &Operation) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at(operation).await
    }
}

impl Repo for ReadonlyRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}

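// The `*Initializer` function types below are used when creating a new repo;
// the boxed `*Factory` types that follow are used when loading an existing
// one and can fail with `BackendLoadError`.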
pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip]
pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;

type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;

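/// Merges `ext` into `base`, panicking if both maps define a factory with the
/// same name.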
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        match base.entry(name) {
            Entry::Vacant(v) => {
                v.insert(factory);
            }
            Entry::Occupied(o) => {
                panic!("Conflicting factory definitions for '{}' factory", o.key())
            }
        }
    }
}

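/// Registry of factory functions for each pluggable store, keyed by the
/// backend name recorded in each store's `type` file. A minimal sketch of
/// registering a custom backend on top of the defaults (`MyBackend` is a
/// hypothetical type implementing [`Backend`]):
///
/// ```ignore
/// let mut factories = StoreFactories::default();
/// factories.add_backend(
///     "my-backend",
///     Box::new(|_settings, store_path| Ok(Box::new(MyBackend::load(store_path)?))),
/// );
/// ```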
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}

impl Default for StoreFactories {
    fn default() -> Self {
        let mut factories = Self::empty();

        factories.add_backend(
            SimpleBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}

#[derive(Debug, Error)]
pub enum StoreLoadError {
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    #[error(transparent)]
    Signing(#[from] SignInitError),
}

impl StoreFactories {
    pub fn empty() -> Self {
        Self {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    pub fn merge(&mut self, ext: Self) {
        let Self {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.clone(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.clone(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.clone(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.clone(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.clone(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}

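/// Reads the backend name recorded in the `type` file at `path` for the given
/// kind of `store`.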
pub fn read_store_type(
    store: &'static str,
    path: impl AsRef<Path>,
) -> Result<String, StoreLoadError> {
    let path = path.as_ref();
    fs::read_to_string(path)
        .context(path)
        .map_err(|source| StoreLoadError::ReadError { store, source })
}

#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    Index(#[from] IndexError),
    #[error(transparent)]
    IndexStore(#[from] IndexStoreError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
    #[error(transparent)]
    TransactionCommit(#[from] TransactionCommitError),
}

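/// Helps create [`ReadonlyRepo`] instances of a repo at the head operation or
/// at a given operation.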
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}

impl RepoLoader {
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

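    /// Creates a loader for the repo at `repo_path` by reading each store's
    /// `type` file and looking up the matching factory in `store_factories`.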
    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendLoadError(err.into()))?;
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
            merge_options,
        );
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    pub async fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            async |op_heads| self.resolve_op_heads(op_heads).await,
        )
        .await?;
        let view = op.view().await?;
        self.finish_load(op, view).await
    }

    #[instrument(skip(self))]
    pub async fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view().await?;
        self.finish_load(op.clone(), view).await
    }

    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

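    /// Returns the root operation of the op store.
    ///
    /// Panics if the root operation cannot be read.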
    pub async fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .await
            .expect("failed to read root operation")
    }

    pub async fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id).await?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }

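    /// Merges the given operations into a single operation by starting a
    /// transaction on the first one and merging the rest in, rebasing
    /// descendants after each merge. Returns the root operation if
    /// `operations` is empty.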
    pub async fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation().await);
        };
        let final_op = if num_operations > 1 {
            let base_repo = self.load_at(&base_op).await?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op).await?;
                tx.repo_mut().rebase_descendants().await?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description).await?.leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    async fn resolve_op_heads(
        &self,
        op_heads: Vec<Operation>,
    ) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
            .await
    }

    async fn finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self
            .index_store
            .get_index_at_op(&operation, &self.store)
            .await?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}

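/// How a commit was rewritten, as recorded in `MutableRepo::parent_mapping`:
/// rewritten to a single successor, rewritten divergently to several
/// successors, or abandoned in favor of the listed (new) parents.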
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    Rewritten(CommitId),
    Divergent(Vec<CommitId>),
    Abandoned(Vec<CommitId>),
}

impl Rewrite {
    fn new_parent_ids(&self) -> &[CommitId] {
        match self {
            Self::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
            Self::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
            Self::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
        }
    }
}

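/// A mutable working copy of a repo's view and index, created when a
/// transaction starts. It records commit predecessors and parent rewrites so
/// that descendants can be rebased before the transaction is committed.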
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    view: DirtyCell<View>,
    commit_predecessors: BTreeMap<CommitId, Vec<CommitId>>,
    parent_mapping: HashMap<CommitId, Rewrite>,
}

impl MutableRepo {
    pub fn new(base_repo: Arc<ReadonlyRepo>, index: &dyn ReadonlyIndex, view: &View) -> Self {
        let mut_view = view.clone();
        let mut_index = index.start_modification();
        Self {
            base_repo,
            index: mut_index,
            view: DirtyCell::with_clean(mut_view),
            commit_predecessors: Default::default(),
            parent_mapping: Default::default(),
        }
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }

    pub(crate) fn is_backed_by_default_index(&self) -> bool {
        self.index.downcast_ref::<DefaultMutableIndex>().is_some()
    }

    pub fn has_changes(&self) -> bool {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        !(self.commit_predecessors.is_empty()
            && self.parent_mapping.is_empty()
            && self.view() == &self.base_repo.view)
    }

    pub(crate) fn consume(
        self,
    ) -> (
        Box<dyn MutableIndex>,
        View,
        BTreeMap<CommitId, Vec<CommitId>>,
    ) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner(), self.commit_predecessors)
    }

    pub fn new_commit(&mut self, parents: Vec<CommitId>, tree: MergedTree) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree).attach(self)
    }

    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }

    pub(crate) fn set_predecessors(&mut self, id: CommitId, predecessors: Vec<CommitId>) {
        self.commit_predecessors.insert(id, predecessors);
    }

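    /// Records a commit as having been rewritten to a single new commit.
    /// Children of the old commit will be rebased onto the new one by
    /// `rebase_descendants()`; `set_divergent_rewrite()` and
    /// `record_abandoned_commit()` below record the other kinds of rewrite.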
    pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping
            .insert(old_id, Rewrite::Rewritten(new_id));
    }

    pub fn set_divergent_rewrite(
        &mut self,
        old_id: CommitId,
        new_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id.clone(),
            Rewrite::Divergent(new_ids.into_iter().collect()),
        );
    }

    pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
        assert_ne!(old_commit.id(), self.store().root_commit_id());
        self.record_abandoned_commit_with_parents(
            old_commit.id().clone(),
            old_commit.parent_ids().iter().cloned(),
        );
    }

    pub fn record_abandoned_commit_with_parents(
        &mut self,
        old_id: CommitId,
        new_parent_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id,
            Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
        );
    }

    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }

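    /// Maps old parent ids to their rewritten replacements, following the
    /// parent mapping transitively. Divergent rewrites are deliberately not
    /// followed, so children keep the old (divergent) commit as a parent.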
    pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
        self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
    }

    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new ids become empty because of cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }

    fn resolve_rewrite_mapping_with(
        &self,
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> BackendResult<HashMap<CommitId, Vec<CommitId>>> {
        let sorted_ids = dag_walk::topo_order_forward(
            self.parent_mapping.keys(),
            |&id| id,
            |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => &[],
                Some(rewrite) => rewrite.new_parent_ids(),
            },
            |id| {
                BackendError::Other(
                    format!("Cycle between rewritten commits involving commit {id}").into(),
                )
            },
        )?;
        let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for old_id in sorted_ids {
            let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
                continue;
            };
            let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
            let new_ids = match rewrite.new_parent_ids() {
                [id] => lookup(id).to_vec(),
                ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
            };
            debug_assert_eq!(
                new_ids,
                self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
            );
            new_mapping.insert(old_id.clone(), new_ids);
        }
        Ok(new_mapping)
    }

    pub async fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options).await?;
        self.update_heads()
            .map_err(|err| err.into_backend_error())?;
        Ok(())
    }

    async fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true)?;
        self.update_local_bookmarks(&rewrite_mapping, options)
            .map_err(|err| BackendError::Other(err.into()))?;
        self.update_wc_commits(&rewrite_mapping).await?;
        Ok(())
    }

    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) -> IndexResult<()> {
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target)?;
        }
        Ok(())
    }

    async fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(name, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((name.to_owned(), change))
            })
            .collect_vec();
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (name, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                self.store().get_commit_async(&new_commit_ids[0]).await?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                let new_commit_futures = new_commit_ids
                    .iter()
                    .map(async |id| self.store().get_commit_async(id).await);
                let new_commits = try_join_all(new_commit_futures).await?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits).await?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree)
                    .write()
                    .await?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            self.edit(name, &new_wc_commit)
                .await
                .map_err(|err| match err {
                    EditCommitError::BackendError(backend_error) => backend_error,
                    EditCommitError::WorkingCopyCommitNotFound(_)
                    | EditCommitError::RewriteRootCommit(_) => panic!("unexpected error: {err:?}"),
                })?;
        }
        Ok(())
    }

    fn update_heads(&mut self) -> Result<(), RevsetEvaluationError> {
        let old_commits_expression =
            RevsetExpression::commits(self.parent_mapping.keys().cloned().collect())
                .intersection(&RevsetExpression::visible_heads().ancestors());
        let heads_to_add_expression = old_commits_expression
            .parents()
            .minus(&old_commits_expression);
        let heads_to_add: Vec<_> = heads_to_add_expression
            .evaluate(self)?
            .iter()
            .try_collect()?;

        let mut view = self.view().store_view().clone();
        for commit_id in self.parent_mapping.keys() {
            view.head_ids.remove(commit_id);
        }
        view.head_ids.extend(heads_to_add);
        self.set_view(view);
        Ok(())
    }

    pub fn find_descendants_for_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let to_visit_revset = RevsetExpression::commits(roots)
            .descendants()
            .minus(&RevsetExpression::commits(
                self.parent_mapping.keys().cloned().collect(),
            ))
            .evaluate(self)
            .map_err(|err| err.into_backend_error())?;
        let to_visit = to_visit_revset
            .iter()
            .commits(self.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;
        Ok(to_visit)
    }

    fn order_commits_for_rebase(
        &self,
        to_visit: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<Vec<Commit>> {
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        let store = self.store();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                let parent_ids = new_parents_map
                    .get(commit.id())
                    .map_or(commit.parent_ids(), |parent_ids| parent_ids);
                for parent_id in parent_ids {
                    let parent = store.get_commit(parent_id);
                    let Ok(parent) = parent else {
                        dependents.push(parent);
                        continue;
                    };
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
            |_| panic!("graph has cycle"),
        )
    }

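    /// Rebase descendants of `roots`, calling `callback` for each commit in
    /// topological order (parents before children) with a [`CommitRewriter`]
    /// whose new parents have already been computed via the parent mapping.
    /// The callback decides how (or whether) each commit is rewritten.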
    pub async fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &HashMap::new(), &options, callback)
            .await
    }

    pub async fn transform_descendants_with_options(
        &mut self,
        roots: Vec<CommitId>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let descendants = self.find_descendants_for_rebase(roots)?;
        self.transform_commits(descendants, new_parents_map, options, callback)
            .await
    }

    pub async fn transform_commits(
        &mut self,
        commits: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        mut callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let mut to_visit = self.order_commits_for_rebase(commits, new_parents_map)?;
        while let Some(old_commit) = to_visit.pop() {
            let parent_ids = new_parents_map
                .get(old_commit.id())
                .map_or(old_commit.parent_ids(), |parent_ids| parent_ids);
            let new_parent_ids = self.new_parents(parent_ids);
            let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
            callback(rewriter).await?;
        }
        self.update_rewritten_references(options).await?;
        Ok(())
    }

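    /// Rebases all descendants of rewritten commits onto their new parents,
    /// honoring `options` and reporting each rebased commit through
    /// `progress`, then clears the parent mapping.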
    pub async fn rebase_descendants_with_options(
        &mut self,
        options: &RebaseOptions,
        mut progress: impl FnMut(Commit, RebasedCommit),
    ) -> BackendResult<()> {
        let roots = self.parent_mapping.keys().cloned().collect();
        self.transform_descendants_with_options(
            roots,
            &HashMap::new(),
            &options.rewrite_refs,
            async |rewriter| {
                if rewriter.parents_changed() {
                    let old_commit = rewriter.old_commit().clone();
                    let rebased_commit = rebase_commit_with_options(rewriter, options).await?;
                    progress(old_commit, rebased_commit);
                }
                Ok(())
            },
        )
        .await?;
        self.parent_mapping.clear();
        Ok(())
    }

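    /// Convenience wrapper around `rebase_descendants_with_options()` with
    /// default options; returns the number of commits that were rebased.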
    pub async fn rebase_descendants(&mut self) -> BackendResult<usize> {
        let options = RebaseOptions::default();
        let mut num_rebased = 0;
        self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
            num_rebased += 1;
        })
        .await?;
        Ok(num_rebased)
    }

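    /// Reparents descendants of rewritten commits onto the new parents
    /// without rebasing their content, returning how many commits were
    /// reparented.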
    pub async fn reparent_descendants(&mut self) -> BackendResult<usize> {
        let roots = self.parent_mapping.keys().cloned().collect_vec();
        let mut num_reparented = 0;
        self.transform_descendants(roots, async |rewriter| {
            if rewriter.parents_changed() {
                let builder = rewriter.reparent();
                builder.write().await?;
                num_reparented += 1;
            }
            Ok(())
        })
        .await?;
        self.parent_mapping.clear();
        Ok(num_reparented)
    }

    pub fn set_wc_commit(
        &mut self,
        name: WorkspaceNameBuf,
        commit_id: CommitId,
    ) -> Result<(), RewriteRootCommit> {
        if &commit_id == self.store().root_commit_id() {
            return Err(RewriteRootCommit);
        }
        self.view_mut().set_wc_commit(name, commit_id);
        Ok(())
    }

    pub async fn remove_wc_commit(&mut self, name: &WorkspaceName) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(name).await?;
        self.view_mut().remove_wc_commit(name);
        Ok(())
    }

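    /// Merges a working-copy commit pointer from two view sides: a trivial
    /// merge wins if one side is unchanged; otherwise the pointer is dropped
    /// if either side removed it, and our own value is kept on conflict.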
    fn merge_wc_commit(
        &mut self,
        name: &WorkspaceName,
        base_id: Option<&CommitId>,
        other_id: Option<&CommitId>,
    ) {
        let view = self.view.get_mut();
        let self_id = view.get_wc_commit_id(name);
        let new_id = if let Some(resolved) =
            trivial_merge(&[self_id, base_id, other_id], SameChange::Accept)
        {
            resolved.cloned()
        } else if self_id.is_none() || other_id.is_none() {
            None
        } else {
            self_id.cloned()
        };
        match new_id {
            Some(id) => view.set_wc_commit(name.to_owned(), id),
            None => view.remove_wc_commit(name),
        }
    }

    pub fn rename_workspace(
        &mut self,
        old_name: &WorkspaceName,
        new_name: WorkspaceNameBuf,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut().rename_workspace(old_name, new_name)
    }

    pub async fn check_out(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<Commit, CheckOutCommitError> {
        let wc_commit = self
            .new_commit(vec![commit.id().clone()], commit.tree())
            .write()
            .await?;
        self.edit(name, &wc_commit).await?;
        Ok(wc_commit)
    }

    pub async fn edit(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(&name).await?;
        self.add_head(commit)?;
        Ok(self.set_wc_commit(name, commit.id().clone())?)
    }

    async fn maybe_abandon_wc_commit(
        &mut self,
        workspace_name: &WorkspaceName,
    ) -> Result<(), EditCommitError> {
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(name, _)| name != workspace_name)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_name).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit_async(&wc_commit_id)
                .await
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }

    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            view.head_ids.remove(root_commit_id);
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }

    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }

    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                self.index
                    .add_commit(head)
                    .block_on()
                    .map_err(|err| BackendError::Other(err.into()))?;
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter_map(|id| match self.index().has_id(id) {
                                Ok(false) => Some(
                                    self.store().get_commit(id).map(CommitByCommitterTimestamp),
                                ),
                                Ok(true) => None,
                                Err(err) => Some(Err(BackendError::Other(err.into()))),
                            })
                            .collect_vec()
                    },
                    |_| panic!("graph has cycle"),
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index
                        .add_commit(missing_commit)
                        .block_on()
                        .map_err(|err| BackendError::Other(err.into()))?;
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }

    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    pub fn get_local_bookmark(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }

    pub fn set_local_bookmark_target(&mut self, name: &RefName, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
        self.view.mark_dirty();
    }

    pub fn merge_local_bookmark(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        self.set_local_bookmark_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }

    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }

    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_bookmark(symbol, new_ref);
        Ok(())
    }

    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) -> IndexResult<()> {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        let base_target = remote_ref.tracked_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target)?;
        remote_ref.state = RemoteRefState::Tracked;
        self.set_remote_bookmark(symbol, remote_ref);
        Ok(())
    }

    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn ensure_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().ensure_remote(remote_name);
    }

    pub fn remove_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().remove_remote(remote_name);
    }

    pub fn rename_remote(&mut self, old: &RemoteName, new: &RemoteName) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_local_tag(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_tag(name).clone())
    }

    pub fn set_local_tag_target(&mut self, name: &RefName, target: RefTarget) {
        self.view_mut().set_local_tag_target(name, target);
    }

    pub fn merge_local_tag(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_local_tag_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_tag(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view.with_ref(|v| v.get_remote_tag(symbol).clone())
    }

    pub fn set_remote_tag(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_tag(symbol, remote_ref);
    }

    fn merge_remote_tag(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_tag(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_tag(symbol, new_ref);
        Ok(())
    }

    pub fn get_git_ref(&self, name: &GitRefName) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }

    pub fn set_git_ref_target(&mut self, name: &GitRefName, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }

    fn merge_git_ref(
        &mut self,
        name: &GitRefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_git_ref_target(name, new_target);
        Ok(())
    }

    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }

    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }

    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }

    pub async fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> Result<(), RepoLoaderError> {
        self.index.merge_in(base_repo.readonly_index())?;
        self.index.merge_in(other_repo.readonly_index())?;

        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view).await?;
        self.view.mark_dirty();
        Ok(())
    }

    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) -> IndexResult<()> {
        self.index.merge_in(other_repo.readonly_index())
    }

    async fn merge_view(&mut self, base: &View, other: &View) -> Result<(), RepoLoaderError> {
        let changed_wc_commits = diff_named_commit_ids(base.wc_commit_ids(), other.wc_commit_ids());
        for (name, (base_id, other_id)) in changed_wc_commits {
            self.merge_wc_commit(name, base_id, other_id);
        }

        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

        if self.is_backed_by_default_index() {
            self.record_rewrites(&base_heads, &own_heads).await?;
            self.record_rewrites(&base_heads, &other_heads).await?;
        } else {
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target)?;
        }

        let changed_local_tags = diff_named_ref_targets(base.local_tags(), other.local_tags());
        for (name, (base_target, other_target)) in changed_local_tags {
            self.merge_local_tag(name, base_target, other_target)?;
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target)?;
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref)?;
        }

        let changed_remote_tags =
            diff_named_remote_refs(base.all_remote_tags(), other.all_remote_tags());
        for (symbol, (base_ref, other_ref)) in changed_remote_tags {
            self.merge_remote_tag(symbol, base_ref, other_ref)?;
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        )?;
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }

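    /// Finds commits that became hidden between `old_heads` and `new_heads`
    /// and records them as rewritten, divergent, or abandoned depending on
    /// whether commits with the same change id appear on the new side.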
    async fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit_async(id).await?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
}

impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}

#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;

#[derive(Debug, Error)]
pub enum EditCommitError {
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    #[error(transparent)]
    BackendError(#[from] BackendError),
}

#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}

mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

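    /// Cell holding a value that is lazily brought back to a clean state:
    /// while dirty, the value lives in the `RefCell`; the first clean read
    /// runs the caller-supplied fixup closure and caches the result in the
    /// `OnceCell`.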
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        pub fn with_clean(value: T) -> Self {
            Self {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                let mut value = self.dirty.borrow_mut().take().unwrap();
                f(&mut value);
                value
            })
        }

        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        pub fn into_inner(self) -> T {
            *self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap()
        }

        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            if let Some(value) = self.clean.get() {
                f(value)
            } else {
                f(self.dirty.borrow().as_ref().unwrap())
            }
        }

        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        pub fn mark_dirty(&mut self) {
            if let Some(value) = self.clean.take() {
                *self.dirty.get_mut() = Some(value);
            }
        }
    }
}