#![expect(missing_docs)]

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::hash_map::Entry;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fs;
use std::path::Path;
use std::slice;
use std::sync::Arc;

use itertools::Itertools as _;
use once_cell::sync::OnceCell;
use pollster::FutureExt as _;
use thiserror::Error;
use tracing::instrument;

use self::dirty_cell::DirtyCell;
use crate::backend::Backend;
use crate::backend::BackendError;
use crate::backend::BackendInitError;
use crate::backend::BackendLoadError;
use crate::backend::BackendResult;
use crate::backend::ChangeId;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::commit::CommitByCommitterTimestamp;
use crate::commit_builder::CommitBuilder;
use crate::commit_builder::DetachedCommitBuilder;
use crate::dag_walk;
use crate::default_index::DefaultIndexStore;
use crate::default_index::DefaultMutableIndex;
use crate::default_submodule_store::DefaultSubmoduleStore;
use crate::file_util::IoResultExt as _;
use crate::file_util::PathError;
use crate::index::ChangeIdIndex;
use crate::index::Index;
use crate::index::IndexError;
use crate::index::IndexResult;
use crate::index::IndexStore;
use crate::index::IndexStoreError;
use crate::index::MutableIndex;
use crate::index::ReadonlyIndex;
use crate::merge::MergeBuilder;
use crate::merge::SameChange;
use crate::merge::trivial_merge;
use crate::merged_tree::MergedTree;
use crate::object_id::HexPrefix;
use crate::object_id::PrefixResolution;
use crate::op_heads_store;
use crate::op_heads_store::OpHeadResolutionError;
use crate::op_heads_store::OpHeadsStore;
use crate::op_heads_store::OpHeadsStoreError;
use crate::op_store;
use crate::op_store::OpStore;
use crate::op_store::OpStoreError;
use crate::op_store::OpStoreResult;
use crate::op_store::OperationId;
use crate::op_store::RefTarget;
use crate::op_store::RemoteRef;
use crate::op_store::RemoteRefState;
use crate::op_store::RootOperationData;
use crate::operation::Operation;
use crate::ref_name::GitRefName;
use crate::ref_name::RefName;
use crate::ref_name::RemoteName;
use crate::ref_name::RemoteRefSymbol;
use crate::ref_name::WorkspaceName;
use crate::ref_name::WorkspaceNameBuf;
use crate::refs::diff_named_commit_ids;
use crate::refs::diff_named_ref_targets;
use crate::refs::diff_named_remote_refs;
use crate::refs::merge_ref_targets;
use crate::refs::merge_remote_refs;
use crate::revset;
use crate::revset::RevsetEvaluationError;
use crate::revset::RevsetExpression;
use crate::revset::RevsetIteratorExt as _;
use crate::rewrite::CommitRewriter;
use crate::rewrite::RebaseOptions;
use crate::rewrite::RebasedCommit;
use crate::rewrite::RewriteRefsOptions;
use crate::rewrite::merge_commit_trees;
use crate::rewrite::rebase_commit_with_options;
use crate::settings::UserSettings;
use crate::signing::SignInitError;
use crate::signing::Signer;
use crate::simple_backend::SimpleBackend;
use crate::simple_op_heads_store::SimpleOpHeadsStore;
use crate::simple_op_store::SimpleOpStore;
use crate::store::Store;
use crate::submodule_store::SubmoduleStore;
use crate::transaction::Transaction;
use crate::transaction::TransactionCommitError;
use crate::tree_merge::MergeOptions;
use crate::view::RenameWorkspaceError;
use crate::view::View;
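
/// Read operations shared by [`ReadonlyRepo`] and [`MutableRepo`]: access to
/// the stores, the index, and the [`View`], plus change-id resolution helpers.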
pub trait Repo {
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

    fn resolve_change_id(&self, change_id: &ChangeId) -> IndexResult<Option<Vec<CommitId>>> {
        let prefix = HexPrefix::from_id(change_id);
        match self.resolve_change_id_prefix(&prefix)? {
            PrefixResolution::NoMatch => Ok(None),
            PrefixResolution::SingleMatch(entries) => Ok(Some(entries)),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<Vec<CommitId>>>;

    fn shortest_unique_change_id_prefix_len(
        &self,
        target_id_bytes: &ChangeId,
    ) -> IndexResult<usize>;
}
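
/// A repository pinned to a particular [`Operation`]: it bundles the commit
/// store, the operation, the index as of that operation, and the [`View`]
/// describing heads, bookmarks, and working-copy commits.
///
/// A minimal sketch of typical use (assumes a `repo: Arc<ReadonlyRepo>` is
/// already in scope; `reload_at_head` and `start_transaction` are defined
/// below):
///
/// ```ignore
/// let repo = repo.reload_at_head()?; // pick up concurrent operations
/// let mut tx = repo.start_transaction();
/// // ... mutate tx.repo_mut() ...
/// ```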
pub struct ReadonlyRepo {
    loader: RepoLoader,
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}

impl Debug for ReadonlyRepo {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_struct("ReadonlyRepo")
            .field("store", &self.loader.store)
            .finish_non_exhaustive()
    }
}

#[derive(Error, Debug)]
pub enum RepoInitError {
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    #[error(transparent)]
    Path(#[from] PathError),
}

impl ReadonlyRepo {
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }
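
    /// Initializes a new repository directory under `repo_path`, creating the
    /// `store`, `op_store`, `op_heads`, `index`, and `submodule_store`
    /// subdirectories and recording each store's type, then returns the repo
    /// loaded at the root operation.
    ///
    /// A minimal sketch (assumes `settings`, `repo_path`, `signer`, and a
    /// caller-supplied `backend_initializer` are in scope):
    ///
    /// ```ignore
    /// let repo = ReadonlyRepo::init(
    ///     &settings,
    ///     repo_path,
    ///     &backend_initializer, // e.g. a closure constructing the commit backend
    ///     signer,
    ///     ReadonlyRepo::default_op_store_initializer(),
    ///     ReadonlyRepo::default_op_heads_store_initializer(),
    ///     ReadonlyRepo::default_index_store_initializer(),
    ///     ReadonlyRepo::default_submodule_store_initializer(),
    /// )?;
    /// ```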
    #[expect(clippy::too_many_arguments)]
    pub fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer,
        op_heads_store_initializer: &OpHeadsStoreInitializer,
        index_store_initializer: &IndexStoreInitializer,
        submodule_store_initializer: &SubmoduleStoreInitializer,
    ) -> Result<Arc<Self>, RepoInitError> {
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendInitError(err.into()))?;
        let store = Store::new(backend, signer, merge_options);

        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store
            .update_op_heads(&[], op_store.root_operation_id())
            .block_on()?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        let root_operation = loader.root_operation();
        let root_view = root_operation.view().expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(Self {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

    pub fn start_transaction(self: &Arc<Self>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    pub fn reload_at_head(&self) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at_head()
    }

    #[instrument]
    pub fn reload_at(&self, operation: &Operation) -> Result<Arc<Self>, RepoLoaderError> {
        self.loader().load_at(operation)
    }
}

impl Repo for ReadonlyRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<Vec<CommitId>>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}

pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip]
pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;

type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;
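
/// Merges the factories from `ext` into `base`, panicking if the same store
/// type name is registered twice.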
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        match base.entry(name) {
            Entry::Vacant(v) => {
                v.insert(factory);
            }
            Entry::Occupied(o) => {
                panic!("Conflicting factory definitions for '{}' factory", o.key())
            }
        }
    }
}
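
/// Registry of constructors for each pluggable store (commit backend, op
/// store, op heads store, index store, submodule store), keyed by the type
/// name recorded in each store's `type` file.
///
/// A minimal sketch of registering a custom backend (assumes a hypothetical
/// `MyBackend` type with a `load` constructor and a `name()` type name):
///
/// ```ignore
/// let mut factories = StoreFactories::default();
/// factories.add_backend(
///     MyBackend::name(), // hypothetical custom backend
///     Box::new(|settings, store_path| Ok(Box::new(MyBackend::load(settings, store_path)?))),
/// );
/// ```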
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}

impl Default for StoreFactories {
    fn default() -> Self {
        let mut factories = Self::empty();

        factories.add_backend(
            SimpleBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}

#[derive(Debug, Error)]
pub enum StoreLoadError {
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    #[error(transparent)]
    Signing(#[from] SignInitError),
}

impl StoreFactories {
    pub fn empty() -> Self {
        Self {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    pub fn merge(&mut self, ext: Self) {
        let Self {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.clone(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.clone(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.clone(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.clone(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.clone(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}
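
/// Reads the store type name from the `type` file at `path`, wrapping I/O
/// failures in [`StoreLoadError::ReadError`].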
pub fn read_store_type(
    store: &'static str,
    path: impl AsRef<Path>,
) -> Result<String, StoreLoadError> {
    let path = path.as_ref();
    fs::read_to_string(path)
        .context(path)
        .map_err(|source| StoreLoadError::ReadError { store, source })
}

#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    Index(#[from] IndexError),
    #[error(transparent)]
    IndexStore(#[from] IndexStoreError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
    #[error(transparent)]
    TransactionCommit(#[from] TransactionCommitError),
}
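
/// Helper that owns the opened stores and can build [`ReadonlyRepo`] instances
/// at arbitrary operations without reopening the stores each time.
///
/// A minimal sketch of loading an existing on-disk repo (assumes `settings`
/// and `repo_path` are in scope):
///
/// ```ignore
/// let loader = RepoLoader::init_from_file_system(
///     &settings,
///     repo_path,
///     &StoreFactories::default(),
/// )?;
/// let repo = loader.load_at_head()?;
/// ```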
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}

impl RepoLoader {
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let merge_options =
            MergeOptions::from_settings(settings).map_err(|err| BackendLoadError(err.into()))?;
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
            merge_options,
        );
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    pub fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self.resolve_op_heads(op_heads),
        )?;
        let view = op.view()?;
        self.finish_load(op, view)
    }

    #[instrument(skip(self))]
    pub fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view()?;
        self.finish_load(op.clone(), view)
    }

    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

    pub fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .expect("failed to read root operation")
    }

    pub fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id).block_on()?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }
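
    /// Merges the given operations into a single operation. Returns the root
    /// operation if `operations` is empty and the operation itself if there
    /// is exactly one; otherwise merges each remaining operation into a
    /// transaction based on the first one, rebasing descendants along the
    /// way, and returns the resulting (unpublished) operation.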
    pub fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation());
        };
        let final_op = if num_operations > 1 {
            let base_repo = self.load_at(&base_op)?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op)?;
                tx.repo_mut().rebase_descendants()?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description)?.leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    fn resolve_op_heads(&self, op_heads: Vec<Operation>) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
    }

    fn finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self.index_store.get_index_at_op(&operation, &self.store)?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}
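
/// How a rewritten commit should be handled when updating references and
/// rebasing descendants; stored in `MutableRepo`'s `parent_mapping`.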
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    /// The old commit was rewritten as this new commit. Children should be
    /// rebased onto the new commit.
    Rewritten(CommitId),
    /// The old commit was rewritten as multiple new commits. Children should
    /// not be rebased automatically.
    Divergent(Vec<CommitId>),
    /// The old commit was abandoned. Children should be rebased onto the
    /// given commits (typically the parents of the old commit).
    Abandoned(Vec<CommitId>),
}

impl Rewrite {
    fn new_parent_ids(&self) -> &[CommitId] {
        match self {
            Self::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
            Self::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
            Self::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
        }
    }
}
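
/// A mutable repository state, the working state of a [`Transaction`]. It
/// buffers view edits and records commit rewrites in `parent_mapping` so that
/// descendants and references can be updated before the transaction commits.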
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    view: DirtyCell<View>,
    commit_predecessors: BTreeMap<CommitId, Vec<CommitId>>,
    parent_mapping: HashMap<CommitId, Rewrite>,
}

impl MutableRepo {
    pub fn new(base_repo: Arc<ReadonlyRepo>, index: &dyn ReadonlyIndex, view: &View) -> Self {
        let mut_view = view.clone();
        let mut_index = index.start_modification();
        Self {
            base_repo,
            index: mut_index,
            view: DirtyCell::with_clean(mut_view),
            commit_predecessors: Default::default(),
            parent_mapping: Default::default(),
        }
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }

    pub(crate) fn is_backed_by_default_index(&self) -> bool {
        self.index.downcast_ref::<DefaultMutableIndex>().is_some()
    }

    pub fn has_changes(&self) -> bool {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        !(self.commit_predecessors.is_empty()
            && self.parent_mapping.is_empty()
            && self.view() == &self.base_repo.view)
    }

    pub(crate) fn consume(
        self,
    ) -> (
        Box<dyn MutableIndex>,
        View,
        BTreeMap<CommitId, Vec<CommitId>>,
    ) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner(), self.commit_predecessors)
    }

    pub fn new_commit(&mut self, parents: Vec<CommitId>, tree: MergedTree) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree).attach(self)
    }

    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }

    pub(crate) fn set_predecessors(&mut self, id: CommitId, predecessors: Vec<CommitId>) {
        self.commit_predecessors.insert(id, predecessors);
    }

    pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping
            .insert(old_id, Rewrite::Rewritten(new_id));
    }

    pub fn set_divergent_rewrite(
        &mut self,
        old_id: CommitId,
        new_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id.clone(),
            Rewrite::Divergent(new_ids.into_iter().collect()),
        );
    }

    pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
        assert_ne!(old_commit.id(), self.store().root_commit_id());
        self.record_abandoned_commit_with_parents(
            old_commit.id().clone(),
            old_commit.parent_ids().iter().cloned(),
        );
    }

    pub fn record_abandoned_commit_with_parents(
        &mut self,
        old_id: CommitId,
        new_parent_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id,
            Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
        );
    }

    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }
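
    /// Maps `old_ids` to their replacement ids by following the rewrite
    /// mapping transitively. Divergent rewrites are intentionally skipped:
    /// a commit rewritten into several divergent commits has no single
    /// successor, so its id is returned unchanged.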
    pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
        self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
    }

    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new_ids became empty because of a cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }

    fn resolve_rewrite_mapping_with(
        &self,
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> BackendResult<HashMap<CommitId, Vec<CommitId>>> {
        let sorted_ids = dag_walk::topo_order_forward(
            self.parent_mapping.keys(),
            |&id| id,
            |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => &[],
                Some(rewrite) => rewrite.new_parent_ids(),
            },
            |id| {
                BackendError::Other(
                    format!("Cycle between rewritten commits involving commit {id}").into(),
                )
            },
        )?;
        let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for old_id in sorted_ids {
            let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
                continue;
            };
            let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
            let new_ids = match rewrite.new_parent_ids() {
                [id] => lookup(id).to_vec(),
                ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
            };
            debug_assert_eq!(
                new_ids,
                self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
            );
            new_mapping.insert(old_id.clone(), new_ids);
        }
        Ok(new_mapping)
    }

    pub fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options)?;
        self.update_heads()
            .map_err(|err| err.into_backend_error())?;
        Ok(())
    }

    fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true)?;
        self.update_local_bookmarks(&rewrite_mapping, options)
            .map_err(|err| BackendError::Other(err.into()))?;
        self.update_wc_commits(&rewrite_mapping)?;
        Ok(())
    }

    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) -> IndexResult<()> {
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target)?;
        }
        Ok(())
    }

    fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(name, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((name.to_owned(), change))
            })
            .collect_vec();
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (name, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                self.store().get_commit(&new_commit_ids[0])?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                let new_commits: Vec<_> = new_commit_ids
                    .iter()
                    .map(|id| self.store().get_commit(id))
                    .try_collect()?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits).block_on()?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree)
                    .write()?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            self.edit(name, &new_wc_commit).map_err(|err| match err {
                EditCommitError::BackendError(backend_error) => backend_error,
                EditCommitError::WorkingCopyCommitNotFound(_)
                | EditCommitError::RewriteRootCommit(_) => panic!("unexpected error: {err:?}"),
            })?;
        }
        Ok(())
    }

    fn update_heads(&mut self) -> Result<(), RevsetEvaluationError> {
        let old_commits_expression =
            RevsetExpression::commits(self.parent_mapping.keys().cloned().collect());
        let heads_to_add_expression = old_commits_expression
            .parents()
            .minus(&old_commits_expression);
        let heads_to_add: Vec<_> = heads_to_add_expression
            .evaluate(self)?
            .iter()
            .try_collect()?;

        let mut view = self.view().store_view().clone();
        for commit_id in self.parent_mapping.keys() {
            view.head_ids.remove(commit_id);
        }
        view.head_ids.extend(heads_to_add);
        self.set_view(view);
        Ok(())
    }

    pub fn find_descendants_for_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let to_visit_revset = RevsetExpression::commits(roots)
            .descendants()
            .minus(&RevsetExpression::commits(
                self.parent_mapping.keys().cloned().collect(),
            ))
            .evaluate(self)
            .map_err(|err| err.into_backend_error())?;
        let to_visit = to_visit_revset
            .iter()
            .commits(self.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;
        Ok(to_visit)
    }

    fn order_commits_for_rebase(
        &self,
        to_visit: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<Vec<Commit>> {
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        let store = self.store();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                let parent_ids = new_parents_map
                    .get(commit.id())
                    .map_or(commit.parent_ids(), |parent_ids| parent_ids);
                for parent_id in parent_ids {
                    let parent = store.get_commit(parent_id);
                    let Ok(parent) = parent else {
                        dependents.push(parent);
                        continue;
                    };
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
            |_| panic!("graph has cycle"),
        )
    }
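
    /// Rebases descendants of the rewritten commits listed in `roots`,
    /// letting `callback` decide what to do with each [`CommitRewriter`]
    /// (rewrite, reparent, or skip). Commits are visited in topological
    /// order, parents before children.
    ///
    /// A minimal sketch that reparents every descendant whose parents changed
    /// (this mirrors `reparent_descendants` below; assumes `mut_repo` and
    /// `roots` are in scope):
    ///
    /// ```ignore
    /// mut_repo.transform_descendants(roots, async |rewriter| {
    ///     if rewriter.parents_changed() {
    ///         rewriter.reparent().write()?;
    ///     }
    ///     Ok(())
    /// })?;
    /// ```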
    pub fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &HashMap::new(), &options, callback)
    }

    pub fn transform_descendants_with_options(
        &mut self,
        roots: Vec<CommitId>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let descendants = self.find_descendants_for_rebase(roots)?;
        self.transform_commits(descendants, new_parents_map, options, callback)
    }

    pub fn transform_commits(
        &mut self,
        commits: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        mut callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let mut to_visit = self.order_commits_for_rebase(commits, new_parents_map)?;
        while let Some(old_commit) = to_visit.pop() {
            let parent_ids = new_parents_map
                .get(old_commit.id())
                .map_or(old_commit.parent_ids(), |parent_ids| parent_ids);
            let new_parent_ids = self.new_parents(parent_ids);
            let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
            callback(rewriter).block_on()?;
        }
        self.update_rewritten_references(options)?;
        Ok(())
    }

    pub fn rebase_descendants_with_options(
        &mut self,
        options: &RebaseOptions,
        mut progress: impl FnMut(Commit, RebasedCommit),
    ) -> BackendResult<()> {
        let roots = self.parent_mapping.keys().cloned().collect();
        self.transform_descendants_with_options(
            roots,
            &HashMap::new(),
            &options.rewrite_refs,
            async |rewriter| {
                if rewriter.parents_changed() {
                    let old_commit = rewriter.old_commit().clone();
                    let rebased_commit = rebase_commit_with_options(rewriter, options)?;
                    progress(old_commit, rebased_commit);
                }
                Ok(())
            },
        )?;
        self.parent_mapping.clear();
        Ok(())
    }
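
    /// Rebases all descendants of the rewritten or abandoned commits onto
    /// their new parents and returns the number of commits that were rebased.
    /// The rewrite mapping is cleared afterwards.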
    pub fn rebase_descendants(&mut self) -> BackendResult<usize> {
        let options = RebaseOptions::default();
        let mut num_rebased = 0;
        self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
            num_rebased += 1;
        })?;
        Ok(num_rebased)
    }

    pub fn reparent_descendants(&mut self) -> BackendResult<usize> {
        let roots = self.parent_mapping.keys().cloned().collect_vec();
        let mut num_reparented = 0;
        self.transform_descendants(roots, async |rewriter| {
            if rewriter.parents_changed() {
                let builder = rewriter.reparent();
                builder.write()?;
                num_reparented += 1;
            }
            Ok(())
        })?;
        self.parent_mapping.clear();
        Ok(num_reparented)
    }

    pub fn set_wc_commit(
        &mut self,
        name: WorkspaceNameBuf,
        commit_id: CommitId,
    ) -> Result<(), RewriteRootCommit> {
        if &commit_id == self.store().root_commit_id() {
            return Err(RewriteRootCommit);
        }
        self.view_mut().set_wc_commit(name, commit_id);
        Ok(())
    }

    pub fn remove_wc_commit(&mut self, name: &WorkspaceName) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(name)?;
        self.view_mut().remove_wc_commit(name);
        Ok(())
    }

    fn merge_wc_commit(
        &mut self,
        name: &WorkspaceName,
        base_id: Option<&CommitId>,
        other_id: Option<&CommitId>,
    ) {
        let view = self.view.get_mut();
        let self_id = view.get_wc_commit_id(name);
        let new_id = if let Some(resolved) =
            trivial_merge(&[self_id, base_id, other_id], SameChange::Accept)
        {
            resolved.cloned()
        } else if self_id.is_none() || other_id.is_none() {
            None
        } else {
            self_id.cloned()
        };
        match new_id {
            Some(id) => view.set_wc_commit(name.to_owned(), id),
            None => view.remove_wc_commit(name),
        }
    }

    pub fn rename_workspace(
        &mut self,
        old_name: &WorkspaceName,
        new_name: WorkspaceNameBuf,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut().rename_workspace(old_name, new_name)
    }

    pub fn check_out(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<Commit, CheckOutCommitError> {
        let wc_commit = self
            .new_commit(vec![commit.id().clone()], commit.tree())
            .write()?;
        self.edit(name, &wc_commit)?;
        Ok(wc_commit)
    }

    pub fn edit(&mut self, name: WorkspaceNameBuf, commit: &Commit) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(&name)?;
        self.add_head(commit)?;
        Ok(self.set_wc_commit(name, commit.id().clone())?)
    }

    fn maybe_abandon_wc_commit(
        &mut self,
        workspace_name: &WorkspaceName,
    ) -> Result<(), EditCommitError> {
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(name, _)| name != workspace_name)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_name).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }

    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            // The root commit is never a head unless it's the only commit in
            // the view.
            view.head_ids.remove(root_commit_id);
            // Remove ancestors of other heads. All head ids are expected to
            // be present in the index, so this cannot fail.
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }

    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }
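
    /// Adds `heads` (and any ancestors not yet indexed) to the index and
    /// makes them visible heads in the view. The common case of a single
    /// head on top of existing heads is handled without a DAG walk; otherwise
    /// missing ancestors are indexed in topological order and the view is
    /// marked dirty so that redundant heads get pruned when invariants are
    /// next enforced.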
    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                self.index
                    .add_commit(head)
                    .map_err(|err| BackendError::Other(err.into()))?;
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter_map(|id| match self.index().has_id(id) {
                                Ok(false) => Some(
                                    self.store().get_commit(id).map(CommitByCommitterTimestamp),
                                ),
                                Ok(true) => None,
                                Err(err) => Some(Err(BackendError::Other(err.into()))),
                            })
                            .collect_vec()
                    },
                    |_| panic!("graph has cycle"),
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index
                        .add_commit(missing_commit)
                        .map_err(|err| BackendError::Other(err.into()))?;
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }

    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    pub fn get_local_bookmark(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }

    pub fn set_local_bookmark_target(&mut self, name: &RefName, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
        self.view.mark_dirty();
    }

    pub fn merge_local_bookmark(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        self.set_local_bookmark_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }

    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }

    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_bookmark(symbol, new_ref);
        Ok(())
    }

    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) -> IndexResult<()> {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        let base_target = remote_ref.tracked_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target)?;
        remote_ref.state = RemoteRefState::Tracked;
        self.set_remote_bookmark(symbol, remote_ref);
        Ok(())
    }

    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn ensure_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().ensure_remote(remote_name);
    }

    pub fn remove_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().remove_remote(remote_name);
    }

    pub fn rename_remote(&mut self, old: &RemoteName, new: &RemoteName) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_local_tag(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_tag(name).clone())
    }

    pub fn set_local_tag_target(&mut self, name: &RefName, target: RefTarget) {
        self.view_mut().set_local_tag_target(name, target);
    }

    pub fn merge_local_tag(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_local_tag_target(name, new_target);
        Ok(())
    }

    pub fn get_remote_tag(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view.with_ref(|v| v.get_remote_tag(symbol).clone())
    }

    pub fn set_remote_tag(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_tag(symbol, remote_ref);
    }

    fn merge_remote_tag(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_tag(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref)?;
        view.set_remote_tag(symbol, new_ref);
        Ok(())
    }

    pub fn get_git_ref(&self, name: &GitRefName) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }

    pub fn set_git_ref_target(&mut self, name: &GitRefName, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }

    fn merge_git_ref(
        &mut self,
        name: &GitRefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) -> IndexResult<()> {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target)?;
        view.set_git_ref_target(name, new_target);
        Ok(())
    }

    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }

    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }

    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }

    pub fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> Result<(), RepoLoaderError> {
        self.index.merge_in(base_repo.readonly_index())?;
        self.index.merge_in(other_repo.readonly_index())?;

        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view)?;
        self.view.mark_dirty();
        Ok(())
    }

    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) -> IndexResult<()> {
        self.index.merge_in(other_repo.readonly_index())
    }

    fn merge_view(&mut self, base: &View, other: &View) -> Result<(), RepoLoaderError> {
        let changed_wc_commits = diff_named_commit_ids(base.wc_commit_ids(), other.wc_commit_ids());
        for (name, (base_id, other_id)) in changed_wc_commits {
            self.merge_wc_commit(name, base_id, other_id);
        }

        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

        if self.is_backed_by_default_index() {
            self.record_rewrites(&base_heads, &own_heads)?;
            self.record_rewrites(&base_heads, &other_heads)?;
        } else {
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target)?;
        }

        let changed_local_tags = diff_named_ref_targets(base.local_tags(), other.local_tags());
        for (name, (base_target, other_target)) in changed_local_tags {
            self.merge_local_tag(name, base_target, other_target)?;
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target)?;
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref)?;
        }

        let changed_remote_tags =
            diff_named_remote_refs(base.all_remote_tags(), other.all_remote_tags());
        for (symbol, (base_ref, other_ref)) in changed_remote_tags {
            self.merge_remote_tag(symbol, base_ref, other_ref)?;
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        )?;
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }
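
    /// Detects commits rewritten between `old_heads` and `new_heads` by
    /// matching change ids: a change present on both sides records its old
    /// commit(s) as rewritten (divergent if several new commits share the
    /// change id), while a change that only exists on the old side is
    /// recorded as abandoned.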
    fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit(id)?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
}

impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(
        &self,
        prefix: &HexPrefix,
    ) -> IndexResult<PrefixResolution<Vec<CommitId>>> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> IndexResult<usize> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}

#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;

#[derive(Debug, Error)]
pub enum EditCommitError {
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    #[error(transparent)]
    BackendError(#[from] BackendError),
}

#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}
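
/// Cell that lazily re-establishes invariants on read: the value is either
/// "clean" or "dirty", and `get_or_ensure_clean` runs the supplied fixup
/// closure only when a dirty value is first read.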
mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        // Invariant: exactly one of `clean` and `dirty` holds the value at
        // any time. The value is boxed so moving it between the two slots
        // doesn't copy `T`.
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        pub fn with_clean(value: T) -> Self {
            Self {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                // `clean` is empty, so the value must be in `dirty`.
                let mut value = self.dirty.borrow_mut().take().unwrap();
                f(&mut value);
                value
            })
        }

        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        pub fn into_inner(self) -> T {
            *self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap()
        }

        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            if let Some(value) = self.clean.get() {
                f(value)
            } else {
                f(self.dirty.borrow().as_ref().unwrap())
            }
        }

        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        pub fn mark_dirty(&mut self) {
            if let Some(value) = self.clean.take() {
                *self.dirty.get_mut() = Some(value);
            }
        }
    }
}