#![expect(missing_docs)]

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::hash_map::Entry;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fs;
use std::path::Path;
use std::slice;
use std::sync::Arc;

use itertools::Itertools as _;
use once_cell::sync::OnceCell;
use pollster::FutureExt as _;
use thiserror::Error;
use tracing::instrument;

use self::dirty_cell::DirtyCell;
use crate::backend::Backend;
use crate::backend::BackendError;
use crate::backend::BackendInitError;
use crate::backend::BackendLoadError;
use crate::backend::BackendResult;
use crate::backend::ChangeId;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::commit::CommitByCommitterTimestamp;
use crate::commit_builder::CommitBuilder;
use crate::commit_builder::DetachedCommitBuilder;
use crate::dag_walk;
use crate::default_index::DefaultIndexStore;
use crate::default_index::DefaultMutableIndex;
use crate::default_submodule_store::DefaultSubmoduleStore;
use crate::file_util::IoResultExt as _;
use crate::file_util::PathError;
use crate::index::ChangeIdIndex;
use crate::index::Index;
use crate::index::IndexError;
use crate::index::IndexResult;
use crate::index::IndexStore;
use crate::index::IndexStoreError;
use crate::index::MutableIndex;
use crate::index::ReadonlyIndex;
use crate::index::ResolvedChangeTargets;
use crate::merge::MergeBuilder;
use crate::merge::SameChange;
use crate::merge::trivial_merge;
use crate::merged_tree::MergedTree;
use crate::object_id::HexPrefix;
use crate::object_id::PrefixResolution;
use crate::op_heads_store;
use crate::op_heads_store::OpHeadResolutionError;
use crate::op_heads_store::OpHeadsStore;
use crate::op_heads_store::OpHeadsStoreError;
use crate::op_store;
use crate::op_store::OpStore;
use crate::op_store::OpStoreError;
use crate::op_store::OpStoreResult;
use crate::op_store::OperationId;
use crate::op_store::RefTarget;
use crate::op_store::RemoteRef;
use crate::op_store::RemoteRefState;
use crate::op_store::RootOperationData;
use crate::operation::Operation;
use crate::ref_name::GitRefName;
use crate::ref_name::RefName;
use crate::ref_name::RemoteName;
use crate::ref_name::RemoteRefSymbol;
use crate::ref_name::WorkspaceName;
use crate::ref_name::WorkspaceNameBuf;
use crate::refs::diff_named_commit_ids;
use crate::refs::diff_named_ref_targets;
use crate::refs::diff_named_remote_refs;
use crate::refs::merge_ref_targets;
use crate::refs::merge_remote_refs;
use crate::revset;
use crate::revset::RevsetEvaluationError;
use crate::revset::RevsetExpression;
use crate::revset::RevsetIteratorExt as _;
use crate::rewrite::CommitRewriter;
use crate::rewrite::RebaseOptions;
use crate::rewrite::RebasedCommit;
use crate::rewrite::RewriteRefsOptions;
use crate::rewrite::merge_commit_trees;
use crate::rewrite::rebase_commit_with_options;
use crate::settings::UserSettings;
use crate::signing::SignInitError;
use crate::signing::Signer;
use crate::simple_backend::SimpleBackend;
use crate::simple_op_heads_store::SimpleOpHeadsStore;
use crate::simple_op_store::SimpleOpStore;
use crate::store::Store;
use crate::submodule_store::SubmoduleStore;
use crate::transaction::Transaction;
use crate::transaction::TransactionCommitError;
use crate::tree_merge::MergeOptions;
use crate::view::RenameWorkspaceError;
use crate::view::View;

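/// Read-only access to a repo's stores, index, and view, implemented by both
/// `ReadonlyRepo` and `MutableRepo`.
///
/// A minimal sketch of resolving a change id through this trait (the `repo`
/// and `change_id` variables are hypothetical):
///
/// ```ignore
/// match repo.resolve_change_id(&change_id)? {
///     Some(targets) => println!("change is visible: {targets:?}"),
///     None => println!("no visible commits for this change"),
/// }
/// ```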
pub trait Repo {
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

    fn resolve_change_id(
        &self,
        change_id: &ChangeId,
    ) -> IndexResult<Option<ResolvedChangeTargets>> {
        let prefix = HexPrefix::from_id(change_id);
        match self.resolve_change_id_prefix(&prefix)? {
            PrefixResolution::NoMatch => Ok(None),
            PrefixResolution::SingleMatch(entries) => Ok(Some(entries)),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>>;

    fn shortest_unique_change_id_prefix_len(
        &self,
        target_id_bytes: &ChangeId,
    ) -> IndexResult<usize>;
}

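/// A repo loaded at a specific operation, with an immutable view and index.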
pub struct ReadonlyRepo {
    loader: RepoLoader,
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}

impl Debug for ReadonlyRepo {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_struct("ReadonlyRepo")
            .field("store", &self.loader.store)
            .finish_non_exhaustive()
    }
}

#[derive(Error, Debug)]
pub enum RepoInitError {
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    #[error(transparent)]
    Path(#[from] PathError),
}

impl ReadonlyRepo {
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }

    #[expect(clippy::too_many_arguments)]
    pub fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer,
        op_heads_store_initializer: &OpHeadsStoreInitializer,
        index_store_initializer: &IndexStoreInitializer,
        submodule_store_initializer: &SubmoduleStoreInitializer,
    ) -> Result<Arc<Self>, RepoInitError> {
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendInitError(err.into()))?;
        let store = Store::new(backend, signer, merge_options);

        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store
            .update_op_heads(&[], op_store.root_operation_id())
            .block_on()?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        let root_operation = loader.root_operation();
        let root_view = root_operation.view().expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(Self {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

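    /// Starts a transaction whose `MutableRepo` is based on this repo's view
    /// and index.
    ///
    /// A minimal usage sketch, assuming a loaded `repo: Arc<ReadonlyRepo>`
    /// and a `some_commit_id` to remove (both names are hypothetical);
    /// `write(...)` and `leave_unpublished()` mirror how `merge_operations`
    /// below commits its transaction:
    ///
    /// ```ignore
    /// let mut tx = repo.start_transaction();
    /// tx.repo_mut().remove_head(&some_commit_id);
    /// let new_repo = tx.write("remove a head".to_string())?.leave_unpublished();
    /// ```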
    pub fn start_transaction(self: &Arc<Self>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    pub fn reload_at_head(&self) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at_head()
    }

    #[instrument]
    pub fn reload_at(&self, operation: &Operation) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at(operation)
    }
}

impl Repo for ReadonlyRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}

pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip]
pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;

type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;

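/// Merges `ext` into `base`, panicking if both maps define a factory for the
/// same name.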
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        match base.entry(name) {
            Entry::Vacant(v) => {
                v.insert(factory);
            }
            Entry::Occupied(o) => {
                panic!("Conflicting factory definitions for '{}' factory", o.key())
            }
        }
    }
}

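/// Factories for each pluggable store, keyed by the `type` string recorded
/// in the corresponding store directory on disk.
///
/// A sketch of registering a custom backend on top of the defaults (the
/// `MyBackend` type is hypothetical; `name()` and `load()` mirror the
/// built-in backends above):
///
/// ```ignore
/// let mut factories = StoreFactories::default();
/// factories.add_backend(
///     MyBackend::name(),
///     Box::new(|settings, store_path| Ok(Box::new(MyBackend::load(settings, store_path)?))),
/// );
/// ```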
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}

impl Default for StoreFactories {
    fn default() -> Self {
        let mut factories = Self::empty();

        factories.add_backend(
            SimpleBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}

#[derive(Debug, Error)]
pub enum StoreLoadError {
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    #[error(transparent)]
    Signing(#[from] SignInitError),
}

impl StoreFactories {
    pub fn empty() -> Self {
        Self {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    pub fn merge(&mut self, ext: Self) {
        let Self {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.clone(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.clone(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.clone(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.clone(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.clone(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}

pub fn read_store_type(
    store: &'static str,
    path: impl AsRef<Path>,
) -> Result<String, StoreLoadError> {
    let path = path.as_ref();
    fs::read_to_string(path)
        .context(path)
        .map_err(|source| StoreLoadError::ReadError { store, source })
}

#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    Index(#[from] IndexError),
    #[error(transparent)]
    IndexStore(#[from] IndexStoreError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
    #[error(transparent)]
    TransactionCommit(#[from] TransactionCommitError),
}

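/// Helps create `ReadonlyRepo` instances of a repo loaded at the head
/// operation or at a given operation.
///
/// A sketch of loading an existing repo from disk with the default store
/// factories (the `settings` and `repo_path` values are hypothetical, and
/// the `?` operators assume a caller that can hold both error types):
///
/// ```ignore
/// let loader =
///     RepoLoader::init_from_file_system(&settings, &repo_path, &StoreFactories::default())?;
/// let repo = loader.load_at_head()?;
/// ```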
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}

impl RepoLoader {
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendLoadError(err.into()))?;
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
            merge_options,
        );
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    pub fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self.resolve_op_heads(op_heads),
        )?;
        let view = op.view()?;
        self.finish_load(op, view)
    }

    #[instrument(skip(self))]
    pub fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view()?;
        self.finish_load(op.clone(), view)
    }

    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

    pub fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .expect("failed to read root operation")
    }

    pub fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id).block_on()?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }

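    /// Merges the given operations into a single operation, rebasing
    /// descendants as needed. Returns the root operation if `operations` is
    /// empty, and the operation itself if there is exactly one.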
    pub fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation());
        };
        let final_op = if num_operations > 1 {
            let base_repo = self.load_at(&base_op)?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op)?;
                tx.repo_mut().rebase_descendants()?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description)?.leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    fn resolve_op_heads(&self, op_heads: Vec<Operation>) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
    }

    fn finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self.index_store.get_index_at_op(&operation, &self.store)?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}

/// Records the replacement for a rewritten or abandoned commit.
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    /// The commit was rewritten as this single new commit.
    Rewritten(CommitId),
    /// The commit was rewritten as multiple visible commits; `new_parents()`
    /// leaves such commits in place instead of following the mapping.
    Divergent(Vec<CommitId>),
    /// The commit was abandoned; children should adopt these commits (the
    /// abandoned commit's parents) as their new parents.
    Abandoned(Vec<CommitId>),
}

impl Rewrite {
    fn new_parent_ids(&self) -> &[CommitId] {
        match self {
            Self::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
            Self::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
            Self::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
        }
    }
}

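/// A mutable view of a repo, created from a `ReadonlyRepo` by
/// `ReadonlyRepo::start_transaction()`. It accumulates index additions, view
/// edits, recorded predecessors, and commit rewrites (in `parent_mapping`)
/// until the owning transaction is committed.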
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    view: DirtyCell<View>,
    commit_predecessors: BTreeMap<CommitId, Vec<CommitId>>,
    parent_mapping: HashMap<CommitId, Rewrite>,
}

impl MutableRepo {
    pub fn new(base_repo: Arc<ReadonlyRepo>, index: &dyn ReadonlyIndex, view: &View) -> Self {
        let mut_view = view.clone();
        let mut_index = index.start_modification();
        Self {
            base_repo,
            index: mut_index,
            view: DirtyCell::with_clean(mut_view),
            commit_predecessors: Default::default(),
            parent_mapping: Default::default(),
        }
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }

    pub(crate) fn is_backed_by_default_index(&self) -> bool {
        self.index.downcast_ref::<DefaultMutableIndex>().is_some()
    }

    pub fn has_changes(&self) -> bool {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        !(self.commit_predecessors.is_empty()
            && self.parent_mapping.is_empty()
            && self.view() == &self.base_repo.view)
    }

    pub(crate) fn consume(
        self,
    ) -> (
        Box<dyn MutableIndex>,
        View,
        BTreeMap<CommitId, Vec<CommitId>>,
    ) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner(), self.commit_predecessors)
    }

    pub fn new_commit(&mut self, parents: Vec<CommitId>, tree: MergedTree) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree).attach(self)
    }

    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }

    pub(crate) fn set_predecessors(&mut self, id: CommitId, predecessors: Vec<CommitId>) {
        self.commit_predecessors.insert(id, predecessors);
    }

    pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping
            .insert(old_id, Rewrite::Rewritten(new_id));
    }

    pub fn set_divergent_rewrite(
        &mut self,
        old_id: CommitId,
        new_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id.clone(),
            Rewrite::Divergent(new_ids.into_iter().collect()),
        );
    }

    pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
        assert_ne!(old_commit.id(), self.store().root_commit_id());
        self.record_abandoned_commit_with_parents(
            old_commit.id().clone(),
            old_commit.parent_ids().iter().cloned(),
        );
    }

    pub fn record_abandoned_commit_with_parents(
        &mut self,
        old_id: CommitId,
        new_parent_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id,
            Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
        );
    }

    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }

    pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
        self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
    }

    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new ids became empty because of a cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }

    fn resolve_rewrite_mapping_with(
        &self,
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> BackendResult<HashMap<CommitId, Vec<CommitId>>> {
        let sorted_ids = dag_walk::topo_order_forward(
            self.parent_mapping.keys(),
            |&id| id,
            |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => &[],
                Some(rewrite) => rewrite.new_parent_ids(),
            },
            |id| {
                BackendError::Other(
                    format!("Cycle between rewritten commits involving commit {id}").into(),
                )
            },
        )?;
        let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for old_id in sorted_ids {
            let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
                continue;
            };
            let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
            let new_ids = match rewrite.new_parent_ids() {
                [id] => lookup(id).to_vec(),
                ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
            };
            debug_assert_eq!(
                new_ids,
                self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
            );
            new_mapping.insert(old_id.clone(), new_ids);
        }
        Ok(new_mapping)
    }

    pub fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options)?;
        self.update_heads()
            .map_err(|err| err.into_backend_error())?;
        Ok(())
    }

    fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true)?;
        self.update_local_bookmarks(&rewrite_mapping, options)
            .map_err(|err| BackendError::Other(err.into()))?;
        self.update_wc_commits(&rewrite_mapping)?;
        Ok(())
    }

    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) -> IndexResult<()> {
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target)?;
        }
        Ok(())
    }

    fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(name, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((name.to_owned(), change))
            })
            .collect_vec();
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (name, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                self.store().get_commit(&new_commit_ids[0])?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                let new_commits: Vec<_> = new_commit_ids
                    .iter()
                    .map(|id| self.store().get_commit(id))
                    .try_collect()?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits).block_on()?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree)
                    .write()?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            self.edit(name, &new_wc_commit).map_err(|err| match err {
                EditCommitError::BackendError(backend_error) => backend_error,
                EditCommitError::WorkingCopyCommitNotFound(_)
                | EditCommitError::RewriteRootCommit(_) => panic!("unexpected error: {err:?}"),
            })?;
        }
        Ok(())
    }

    fn update_heads(&mut self) -> Result<(), RevsetEvaluationError> {
        let old_commits_expression =
            RevsetExpression::commits(self.parent_mapping.keys().cloned().collect());
        let heads_to_add_expression = old_commits_expression
            .parents()
            .minus(&old_commits_expression);
        let heads_to_add: Vec<_> = heads_to_add_expression
            .evaluate(self)?
            .iter()
            .try_collect()?;

        let mut view = self.view().store_view().clone();
        for commit_id in self.parent_mapping.keys() {
            view.head_ids.remove(commit_id);
        }
        view.head_ids.extend(heads_to_add);
        self.set_view(view);
        Ok(())
    }

    pub fn find_descendants_for_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let to_visit_revset = RevsetExpression::commits(roots)
            .descendants()
            .minus(&RevsetExpression::commits(
                self.parent_mapping.keys().cloned().collect(),
            ))
            .evaluate(self)
            .map_err(|err| err.into_backend_error())?;
        let to_visit = to_visit_revset
            .iter()
            .commits(self.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;
        Ok(to_visit)
    }

    fn order_commits_for_rebase(
        &self,
        to_visit: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<Vec<Commit>> {
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        let store = self.store();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                let parent_ids = new_parents_map
                    .get(commit.id())
                    .map_or(commit.parent_ids(), |parent_ids| parent_ids);
                for parent_id in parent_ids {
                    let parent = store.get_commit(parent_id);
                    let Ok(parent) = parent else {
                        dependents.push(parent);
                        continue;
                    };
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
            |_| panic!("graph has cycle"),
        )
    }

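    /// Rewrites descendants of `roots` in topological order, passing each
    /// one to `callback` as a `CommitRewriter`.
    ///
    /// A sketch of reparenting every descendant of a commit (the `root_id`
    /// variable is hypothetical), mirroring how `reparent_descendants` below
    /// uses this method:
    ///
    /// ```ignore
    /// mut_repo.transform_descendants(vec![root_id], async |rewriter| {
    ///     if rewriter.parents_changed() {
    ///         rewriter.reparent().write()?;
    ///     }
    ///     Ok(())
    /// })?;
    /// ```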
    pub fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &HashMap::new(), &options, callback)
    }

    pub fn transform_descendants_with_options(
        &mut self,
        roots: Vec<CommitId>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let descendants = self.find_descendants_for_rebase(roots)?;
        self.transform_commits(descendants, new_parents_map, options, callback)
    }

    pub fn transform_commits(
        &mut self,
        commits: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        mut callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let mut to_visit = self.order_commits_for_rebase(commits, new_parents_map)?;
        while let Some(old_commit) = to_visit.pop() {
            let parent_ids = new_parents_map
                .get(old_commit.id())
                .map_or(old_commit.parent_ids(), |parent_ids| parent_ids);
            let new_parent_ids = self.new_parents(parent_ids);
            let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
            callback(rewriter).block_on()?;
        }
        self.update_rewritten_references(options)?;
        Ok(())
    }

    pub fn rebase_descendants_with_options(
        &mut self,
        options: &RebaseOptions,
        mut progress: impl FnMut(Commit, RebasedCommit),
    ) -> BackendResult<()> {
        let roots = self.parent_mapping.keys().cloned().collect();
        self.transform_descendants_with_options(
            roots,
            &HashMap::new(),
            &options.rewrite_refs,
            async |rewriter| {
                if rewriter.parents_changed() {
                    let old_commit = rewriter.old_commit().clone();
                    let rebased_commit = rebase_commit_with_options(rewriter, options)?;
                    progress(old_commit, rebased_commit);
                }
                Ok(())
            },
        )?;
        self.parent_mapping.clear();
        Ok(())
    }

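    /// Rebases all descendants of the rewritten commits recorded in the
    /// parent mapping, using default options, and returns the number of
    /// commits that were rebased.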
    pub fn rebase_descendants(&mut self) -> BackendResult<usize> {
        let options = RebaseOptions::default();
        let mut num_rebased = 0;
        self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
            num_rebased += 1;
        })?;
        Ok(num_rebased)
    }

    pub fn reparent_descendants(&mut self) -> BackendResult<usize> {
        let roots = self.parent_mapping.keys().cloned().collect_vec();
        let mut num_reparented = 0;
        self.transform_descendants(roots, async |rewriter| {
            if rewriter.parents_changed() {
                let builder = rewriter.reparent();
                builder.write()?;
                num_reparented += 1;
            }
            Ok(())
        })?;
        self.parent_mapping.clear();
        Ok(num_reparented)
    }

    pub fn set_wc_commit(
        &mut self,
        name: WorkspaceNameBuf,
        commit_id: CommitId,
    ) -> Result<(), RewriteRootCommit> {
        if &commit_id == self.store().root_commit_id() {
            return Err(RewriteRootCommit);
        }
        self.view_mut().set_wc_commit(name, commit_id);
        Ok(())
    }

    pub fn remove_wc_commit(&mut self, name: &WorkspaceName) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(name)?;
        self.view_mut().remove_wc_commit(name);
        Ok(())
    }

    fn merge_wc_commit(
        &mut self,
        name: &WorkspaceName,
        base_id: Option<&CommitId>,
        other_id: Option<&CommitId>,
    ) {
        let view = self.view.get_mut();
        let self_id = view.get_wc_commit_id(name);
        let new_id = if let Some(resolved) =
            trivial_merge(&[self_id, base_id, other_id], SameChange::Accept)
        {
            resolved.cloned()
        } else if self_id.is_none() || other_id.is_none() {
            None
        } else {
            self_id.cloned()
        };
        match new_id {
            Some(id) => view.set_wc_commit(name.to_owned(), id),
            None => view.remove_wc_commit(name),
        }
    }

    pub fn rename_workspace(
        &mut self,
        old_name: &WorkspaceName,
        new_name: WorkspaceNameBuf,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut().rename_workspace(old_name, new_name)
    }

    pub fn check_out(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<Commit, CheckOutCommitError> {
        let wc_commit = self
            .new_commit(vec![commit.id().clone()], commit.tree())
            .write()?;
        self.edit(name, &wc_commit)?;
        Ok(wc_commit)
    }

    pub fn edit(&mut self, name: WorkspaceNameBuf, commit: &Commit) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(&name)?;
        self.add_head(commit)?;
        Ok(self.set_wc_commit(name, commit.id().clone())?)
    }

    fn maybe_abandon_wc_commit(
        &mut self,
        workspace_name: &WorkspaceName,
    ) -> Result<(), EditCommitError> {
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(name, _)| name != workspace_name)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_name).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }

    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            view.head_ids.remove(root_commit_id);
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }

    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }

    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                self.index
                    .add_commit(head)
                    .map_err(|err| BackendError::Other(err.into()))?;
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter_map(|id| match self.index().has_id(id) {
                                Ok(false) => Some(
                                    self.store().get_commit(id).map(CommitByCommitterTimestamp),
                                ),
                                Ok(true) => None,
                                Err(err) => Some(Err(BackendError::Other(err.into()))),
                            })
                            .collect_vec()
                    },
                    |_| panic!("graph has cycle"),
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index
                        .add_commit(missing_commit)
                        .map_err(|err| BackendError::Other(err.into()))?;
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }

    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    pub fn get_local_bookmark(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }

    pub fn set_local_bookmark_target(&mut self, name: &RefName, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
        self.view.mark_dirty();
    }

    pub fn merge_local_bookmark(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        self.set_local_bookmark_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }

    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }

    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_bookmark(symbol, new_ref);
        Ok(())
    }

    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) -> IndexResult<()> {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        let base_target = remote_ref.tracked_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target)?;
        remote_ref.state = RemoteRefState::Tracked;
        self.set_remote_bookmark(symbol, remote_ref);
        Ok(())
    }

    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn ensure_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().ensure_remote(remote_name);
    }

    pub fn remove_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().remove_remote(remote_name);
    }

    pub fn rename_remote(&mut self, old: &RemoteName, new: &RemoteName) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_local_tag(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_tag(name).clone())
    }

    pub fn set_local_tag_target(&mut self, name: &RefName, target: RefTarget) {
        self.view_mut().set_local_tag_target(name, target);
    }

    pub fn merge_local_tag(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_local_tag_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_tag(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view.with_ref(|v| v.get_remote_tag(symbol).clone())
    }

    pub fn set_remote_tag(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_tag(symbol, remote_ref);
    }

    fn merge_remote_tag(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_tag(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_tag(symbol, new_ref);
        Ok(())
    }

    pub fn get_git_ref(&self, name: &GitRefName) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }

    pub fn set_git_ref_target(&mut self, name: &GitRefName, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }

    fn merge_git_ref(
        &mut self,
        name: &GitRefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_git_ref_target(name, new_target);
        Ok(())
    }

    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }

    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }

    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }

    pub fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> Result<(), RepoLoaderError> {
        self.index.merge_in(base_repo.readonly_index())?;
        self.index.merge_in(other_repo.readonly_index())?;

        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view)?;
        self.view.mark_dirty();
        Ok(())
    }

    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) -> IndexResult<()> {
        self.index.merge_in(other_repo.readonly_index())
    }

    fn merge_view(&mut self, base: &View, other: &View) -> Result<(), RepoLoaderError> {
        let changed_wc_commits = diff_named_commit_ids(base.wc_commit_ids(), other.wc_commit_ids());
        for (name, (base_id, other_id)) in changed_wc_commits {
            self.merge_wc_commit(name, base_id, other_id);
        }

        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

        if self.is_backed_by_default_index() {
            self.record_rewrites(&base_heads, &own_heads)?;
            self.record_rewrites(&base_heads, &other_heads)?;
        } else {
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target)?;
        }

        let changed_local_tags = diff_named_ref_targets(base.local_tags(), other.local_tags());
        for (name, (base_target, other_target)) in changed_local_tags {
            self.merge_local_tag(name, base_target, other_target)?;
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target)?;
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref)?;
        }

        let changed_remote_tags =
            diff_named_remote_refs(base.all_remote_tags(), other.all_remote_tags());
        for (symbol, (base_ref, other_ref)) in changed_remote_tags {
            self.merge_remote_tag(symbol, base_ref, other_ref)?;
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        )?;
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }

    fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit(id)?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
}

impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<ResolvedChangeTargets>> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}

#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;

#[derive(Debug, Error)]
pub enum EditCommitError {
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    #[error(transparent)]
    BackendError(#[from] BackendError),
}

#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}

mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

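    /// A cell that holds either a "clean" value or a "dirty" one that must
    /// be repaired (via `ensure_clean` or `get_or_ensure_clean`) before it
    /// can be read through a shared reference.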
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        pub fn with_clean(value: T) -> Self {
            Self {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                let mut value = self.dirty.borrow_mut().take().unwrap();
                f(&mut value);
                value
            })
        }

        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        pub fn into_inner(self) -> T {
            *self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap()
        }

        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            if let Some(value) = self.clean.get() {
                f(value)
            } else {
                f(self.dirty.borrow().as_ref().unwrap())
            }
        }

        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        pub fn mark_dirty(&mut self) {
            if let Some(value) = self.clean.take() {
                *self.dirty.get_mut() = Some(value);
            }
        }
    }
}