1#![allow(missing_docs)]
16
17use std::collections::hash_map::Entry;
18use std::collections::HashMap;
19use std::collections::HashSet;
20use std::fmt::Debug;
21use std::fmt::Formatter;
22use std::fs;
23use std::path::Path;
24use std::slice;
25use std::sync::Arc;
26
27use itertools::Itertools;
28use once_cell::sync::OnceCell;
29use thiserror::Error;
30use tracing::instrument;
31
32use self::dirty_cell::DirtyCell;
33use crate::backend::Backend;
34use crate::backend::BackendError;
35use crate::backend::BackendInitError;
36use crate::backend::BackendLoadError;
37use crate::backend::BackendResult;
38use crate::backend::ChangeId;
39use crate::backend::CommitId;
40use crate::backend::MergedTreeId;
41use crate::commit::Commit;
42use crate::commit::CommitByCommitterTimestamp;
43use crate::commit_builder::CommitBuilder;
44use crate::commit_builder::DetachedCommitBuilder;
45use crate::dag_walk;
46use crate::default_index::DefaultIndexStore;
47use crate::default_index::DefaultMutableIndex;
48use crate::default_submodule_store::DefaultSubmoduleStore;
49use crate::file_util::IoResultExt as _;
50use crate::file_util::PathError;
51use crate::index::ChangeIdIndex;
52use crate::index::Index;
53use crate::index::IndexReadError;
54use crate::index::IndexStore;
55use crate::index::MutableIndex;
56use crate::index::ReadonlyIndex;
57use crate::local_backend::LocalBackend;
58use crate::merge::MergeBuilder;
59use crate::object_id::HexPrefix;
60use crate::object_id::ObjectId;
61use crate::object_id::PrefixResolution;
62use crate::op_heads_store;
63use crate::op_heads_store::OpHeadResolutionError;
64use crate::op_heads_store::OpHeadsStore;
65use crate::op_heads_store::OpHeadsStoreError;
66use crate::op_store;
67use crate::op_store::OpStore;
68use crate::op_store::OpStoreError;
69use crate::op_store::OpStoreResult;
70use crate::op_store::OperationId;
71use crate::op_store::RefTarget;
72use crate::op_store::RemoteRef;
73use crate::op_store::RemoteRefState;
74use crate::op_store::RootOperationData;
75use crate::op_store::WorkspaceId;
76use crate::operation::Operation;
77use crate::refs::diff_named_ref_targets;
78use crate::refs::diff_named_remote_refs;
79use crate::refs::merge_ref_targets;
80use crate::refs::merge_remote_refs;
81use crate::refs::RemoteRefSymbol;
82use crate::revset;
83use crate::revset::RevsetExpression;
84use crate::revset::RevsetIteratorExt;
85use crate::rewrite::merge_commit_trees;
86use crate::rewrite::rebase_commit_with_options;
87use crate::rewrite::CommitRewriter;
88use crate::rewrite::RebaseOptions;
89use crate::rewrite::RebasedCommit;
90use crate::rewrite::RewriteRefsOptions;
91use crate::settings::UserSettings;
92use crate::signing::SignInitError;
93use crate::signing::Signer;
94use crate::simple_op_heads_store::SimpleOpHeadsStore;
95use crate::simple_op_store::SimpleOpStore;
96use crate::store::Store;
97use crate::submodule_store::SubmoduleStore;
98use crate::transaction::Transaction;
99use crate::view::RenameWorkspaceError;
100use crate::view::View;
101
/// Read access shared by readonly and mutable repos: the backing stores,
/// the commit index, and the current view.
pub trait Repo {
    /// The readonly repo this object is (or was derived from).
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

    /// Resolves a complete change id to the commit ids it maps to.
    ///
    /// Returns `None` if the change id is unknown. Panics if the complete
    /// id resolves ambiguously, which would indicate an index bug.
    fn resolve_change_id(&self, change_id: &ChangeId) -> Option<Vec<CommitId>> {
        // A full change id is treated as a maximally-long hex prefix.
        let prefix = HexPrefix::from_bytes(change_id.as_bytes());
        match self.resolve_change_id_prefix(&prefix) {
            PrefixResolution::NoMatch => None,
            PrefixResolution::SingleMatch(entries) => Some(entries),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    /// Resolves a hex prefix of a change id to the matching commit ids.
    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>>;

    /// Shortest hex-prefix length that uniquely identifies `target_id_bytes`.
    fn shortest_unique_change_id_prefix_len(&self, target_id_bytes: &ChangeId) -> usize;
}
131
/// An immutable snapshot of the repo, loaded at a single operation.
pub struct ReadonlyRepo {
    // Stores and settings, shared with other repos loaded from the same place.
    loader: RepoLoader,
    // The operation this repo was loaded at.
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    // Built lazily from the view's heads on first change-id lookup.
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}
140
141impl Debug for ReadonlyRepo {
142 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
143 f.debug_struct("ReadonlyRepo")
144 .field("store", &self.loader.store)
145 .finish_non_exhaustive()
146 }
147}
148
/// Error initializing a new repo (see [`ReadonlyRepo::init`]).
#[derive(Error, Debug)]
pub enum RepoInitError {
    /// A store failed to initialize.
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    /// The op-heads store could not record the initial head.
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    /// A filesystem operation (create/write/canonicalize) failed.
    #[error(transparent)]
    Path(#[from] PathError),
}
158
impl ReadonlyRepo {
    /// Default op-store initializer: a `SimpleOpStore`.
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    /// Default op-heads-store initializer: a `SimpleOpHeadsStore`.
    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    /// Default index-store initializer: a `DefaultIndexStore`.
    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    /// Default submodule-store initializer: a `DefaultSubmoduleStore`.
    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }

    /// Initializes a brand-new repo at `repo_path`.
    ///
    /// Creates the `store`, `op_store`, `op_heads`, `index`, and
    /// `submodule_store` subdirectories, records each store's type name in a
    /// `type` file so the repo can be loaded again later, seeds the op heads
    /// with the root operation, and returns the repo loaded at that root
    /// operation.
    #[allow(clippy::too_many_arguments)]
    pub fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer,
        op_heads_store_initializer: &OpHeadsStoreInitializer,
        index_store_initializer: &IndexStoreInitializer,
        submodule_store_initializer: &SubmoduleStoreInitializer,
    ) -> Result<Arc<ReadonlyRepo>, RepoInitError> {
        // Canonicalize so later joins and error messages use a stable path.
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        // Commit backend; its type name is persisted in `store/type`.
        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let store = Store::new(backend, signer);

        // Operation store; the root operation data carries the root commit id.
        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        // Operation heads store, seeded with the root operation as the head.
        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store.update_op_heads(&[], op_store.root_operation_id())?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        // Commit index store.
        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        // Submodule store.
        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        // A freshly initialized repo is positioned at the root operation.
        let root_operation = loader.root_operation();
        let root_view = root_operation.view().expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            // An unreadable root-operation index means initialization failed.
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(ReadonlyRepo {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    /// The loader holding the stores this repo was loaded from.
    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    /// Id of the operation this repo was loaded at.
    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    /// The operation this repo was loaded at.
    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    // Lazily builds the change-id index from the view's heads on first use.
    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

    /// Starts a transaction for making changes on top of this repo.
    pub fn start_transaction(self: &Arc<ReadonlyRepo>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    /// Reloads the repo at the current operation head(s).
    pub fn reload_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at_head()
    }

    /// Reloads the repo at the given operation.
    #[instrument]
    pub fn reload_at(&self, operation: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at(operation)
    }
}
313
impl Repo for ReadonlyRepo {
    /// A readonly repo is its own base repo.
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    // Both resolutions delegate to the lazily built change-id index.
    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}
347
/// Callback signatures used to create each store when initializing a
/// brand-new repo (see [`ReadonlyRepo::init`]). Each receives the user
/// settings and the directory the store should live in.
pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip] pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;
360
// Factory signatures used by `StoreFactories` to *load* an existing store by
// its recorded type name (unlike the `*Initializer` aliases, which create
// new stores and may fail with `BackendInitError` instead of
// `BackendLoadError`).
type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;
372
/// Moves every factory from `ext` into `base`.
///
/// Panics if `ext` registers a factory name that already exists in `base`,
/// since silently overriding a registered factory would hide a programming
/// error.
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        if base.contains_key(&name) {
            panic!("Conflicting factory definitions for '{}' factory", name);
        }
        base.insert(name, factory);
    }
}
385
/// Registry mapping store type names (as written in each store's `type`
/// file) to the factory that loads that store implementation.
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}
393
impl Default for StoreFactories {
    /// Registers the store implementations built into this crate, plus the
    /// `git` and `testing` feature-gated backends when those features are
    /// enabled.
    fn default() -> Self {
        let mut factories = StoreFactories::empty();

        // Commit backends.
        factories.add_backend(
            LocalBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(LocalBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        // Operation stores.
        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        // Operation heads stores.
        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        // Index stores.
        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        // Submodule stores.
        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}
451
/// Error loading one of an existing repo's stores.
#[derive(Debug, Error)]
pub enum StoreLoadError {
    /// The store's `type` file names an implementation with no registered
    /// factory.
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    /// The store's `type` file could not be read.
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    /// The factory itself failed to load the store.
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    /// The commit signer could not be created from settings.
    #[error(transparent)]
    Signing(#[from] SignInitError),
}
469
impl StoreFactories {
    /// A registry with no store types registered.
    pub fn empty() -> Self {
        StoreFactories {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    /// Merges the factories registered in `ext` into `self`.
    ///
    /// Panics (via [`merge_factories_map`]) on conflicting registrations.
    pub fn merge(&mut self, ext: StoreFactories) {
        // Full destructuring so adding a field is a compile error here.
        let StoreFactories {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    /// Registers a commit backend factory under `name`.
    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    /// Loads the commit backend whose type is recorded in
    /// `store_path/type`.
    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.to_string(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    /// Registers an operation store factory under `name`.
    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    /// Loads the operation store whose type is recorded in
    /// `store_path/type`.
    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.to_string(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    /// Registers an operation-heads store factory under `name`.
    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    /// Loads the operation-heads store whose type is recorded in
    /// `store_path/type`.
    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.to_string(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    /// Registers an index store factory under `name`.
    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    /// Loads the index store whose type is recorded in `store_path/type`.
    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.to_string(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    /// Registers a submodule store factory under `name`.
    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    /// Loads the submodule store whose type is recorded in
    /// `store_path/type`.
    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.to_string(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}
602
603pub fn read_store_type(
604 store: &'static str,
605 path: impl AsRef<Path>,
606) -> Result<String, StoreLoadError> {
607 let path = path.as_ref();
608 fs::read_to_string(path)
609 .context(path)
610 .map_err(|source| StoreLoadError::ReadError { store, source })
611}
612
/// Error loading a repo at an operation (see [`RepoLoader`]).
#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    IndexRead(#[from] IndexReadError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
}
626
/// Bundles the stores and settings needed to load a repo at any operation.
/// Cheap to clone: all stores are behind `Arc`.
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}
638
impl RepoLoader {
    /// Creates a loader from already-constructed stores.
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

    /// Creates a loader for the repo at `repo_path`, loading each store
    /// implementation via `store_factories` according to the type name
    /// recorded in the store's `type` file.
    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
        );
        // The op store needs the root commit id for the root operation data.
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    /// Loads the repo at the current operation head, first reconciling
    /// divergent operation heads into a single operation if needed.
    pub fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self._resolve_op_heads(op_heads),
        )?;
        let view = op.view()?;
        self._finish_load(op, view)
    }

    /// Loads the repo at the given operation.
    #[instrument(skip(self))]
    pub fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view()?;
        self._finish_load(op.clone(), view)
    }

    /// Assembles a `ReadonlyRepo` from already-loaded parts.
    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

    /// The root (initial) operation. Reading it is expected to always
    /// succeed for a well-formed repo, hence the `expect`.
    pub fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .expect("failed to read root operation")
    }

    /// Reads the operation with the given id from the op store.
    pub fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id)?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }

    /// Merges `operations` into a single operation.
    ///
    /// Returns the root operation for an empty input, the operation itself
    /// for a single input; otherwise writes the merge as a new unpublished
    /// operation described by `tx_description` (defaulting to
    /// "merge N operations").
    pub fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation());
        };
        let final_op = if num_operations > 1 {
            // Merge the remaining operations into the first, rebasing
            // descendants after each merge.
            let base_repo = self.load_at(&base_op)?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op)?;
                tx.repo_mut().rebase_descendants()?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description).leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    // Callback for resolve_op_heads: merge divergent heads into one op.
    fn _resolve_op_heads(&self, op_heads: Vec<Operation>) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
    }

    // Builds the ReadonlyRepo once the operation and its view are known.
    fn _finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self.index_store.get_index_at_op(&operation, &self.store)?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}
819
/// Records what replaced a rewritten commit in `MutableRepo::parent_mapping`.
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    /// Rewritten to exactly one successor commit.
    Rewritten(CommitId),
    /// Rewritten to multiple successor commits.
    Divergent(Vec<CommitId>),
    /// Abandoned; children should be re-parented onto these ids (the
    /// abandoned commit's parents, see `record_abandoned_commit`).
    Abandoned(Vec<CommitId>),
}
832
833impl Rewrite {
834 fn new_parent_ids(&self) -> &[CommitId] {
835 match self {
836 Rewrite::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
837 Rewrite::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
838 Rewrite::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
839 }
840 }
841}
842
/// A mutable copy of a repo, accumulating changes on top of a readonly
/// base repo before they are committed as a new operation.
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    // Wrapped in DirtyCell so view invariants can be re-established lazily.
    view: DirtyCell<View>,
    // Rewritten/abandoned commit ids mapped to their replacements.
    parent_mapping: HashMap<CommitId, Rewrite>,
}
857
858impl MutableRepo {
859 pub fn new(
860 base_repo: Arc<ReadonlyRepo>,
861 index: &dyn ReadonlyIndex,
862 view: &View,
863 ) -> MutableRepo {
864 let mut_view = view.clone();
865 let mut_index = index.start_modification();
866 MutableRepo {
867 base_repo,
868 index: mut_index,
869 view: DirtyCell::with_clean(mut_view),
870 parent_mapping: Default::default(),
871 }
872 }
873
    /// The readonly repo this mutable repo was created from.
    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }
877
    // Mutable access to the view. NOTE(review): presumably marks the
    // DirtyCell dirty so invariants are re-established later — confirm in
    // the dirty_cell module.
    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }
881
    /// The mutable commit index being built up alongside this repo.
    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }
885
886 pub fn has_changes(&self) -> bool {
887 self.view.ensure_clean(|v| self.enforce_view_invariants(v));
888 !(self.parent_mapping.is_empty() && self.view() == &self.base_repo.view)
889 }
890
    /// Consumes this mutable repo, yielding its index and view.
    pub(crate) fn consume(self) -> (Box<dyn MutableIndex>, View) {
        // Re-establish view invariants before handing the view out.
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner())
    }
895
    /// Returns a builder for a new commit with the given parents and tree.
    pub fn new_commit(&mut self, parents: Vec<CommitId>, tree_id: MergedTreeId) -> CommitBuilder {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree_id).attach(self)
    }
901
    /// Returns a builder for rewriting `predecessor` into a new commit.
    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }
909
910 pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
917 assert_ne!(old_id, *self.store().root_commit_id());
918 self.parent_mapping
919 .insert(old_id, Rewrite::Rewritten(new_id));
920 }
921
922 pub fn set_divergent_rewrite(
930 &mut self,
931 old_id: CommitId,
932 new_ids: impl IntoIterator<Item = CommitId>,
933 ) {
934 assert_ne!(old_id, *self.store().root_commit_id());
935 self.parent_mapping.insert(
936 old_id.clone(),
937 Rewrite::Divergent(new_ids.into_iter().collect()),
938 );
939 }
940
941 pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
951 assert_ne!(old_commit.id(), self.store().root_commit_id());
952 self.record_abandoned_commit_with_parents(
954 old_commit.id().clone(),
955 old_commit.parent_ids().iter().cloned(),
956 );
957 }
958
959 pub fn record_abandoned_commit_with_parents(
965 &mut self,
966 old_id: CommitId,
967 new_parent_ids: impl IntoIterator<Item = CommitId>,
968 ) {
969 assert_ne!(old_id, *self.store().root_commit_id());
970 self.parent_mapping.insert(
971 old_id,
972 Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
973 );
974 }
975
    /// Whether any rewrites or abandons have been recorded.
    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }
979
980 pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
987 self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
988 }
989
    // Maps each id in `old_ids` through the parent mapping, transitively,
    // following only entries accepted by `predicate`. Ids with no accepted
    // mapping are kept as-is. First-visit order is preserved; duplicates
    // are skipped via the `visited` set.
    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        // Stack; ids pushed in reverse so they pop in input order.
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    // Not rewritten (or rewrite filtered out): keep the id.
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    // Follow replacements instead of emitting the old id.
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new ids become empty because of cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }
1028
1029 fn resolve_rewrite_mapping_with(
1033 &self,
1034 mut predicate: impl FnMut(&Rewrite) -> bool,
1035 ) -> HashMap<CommitId, Vec<CommitId>> {
1036 let sorted_ids = dag_walk::topo_order_forward(
1037 self.parent_mapping.keys(),
1038 |&id| id,
1039 |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
1040 None => &[],
1041 Some(rewrite) => rewrite.new_parent_ids(),
1042 },
1043 );
1044 let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
1045 for old_id in sorted_ids {
1046 let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
1047 continue;
1048 };
1049 let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
1050 let new_ids = match rewrite.new_parent_ids() {
1051 [id] => lookup(id).to_vec(), ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
1053 };
1054 debug_assert_eq!(
1055 new_ids,
1056 self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
1057 );
1058 new_mapping.insert(old_id.clone(), new_ids);
1059 }
1060 new_mapping
1061 }
1062
    /// Applies the recorded rewrites to the view: updates refs and
    /// working-copy commits, then recomputes the head set.
    pub fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options)?;
        self.update_heads();
        Ok(())
    }
1073
    // Updates local bookmarks and working-copy commits according to the
    // fully-resolved parent mapping (all rewrite kinds included).
    fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true);
        self.update_local_bookmarks(&rewrite_mapping, options);
        self.update_wc_commits(&rewrite_mapping)?;
        Ok(())
    }
1080
    // Retargets local bookmarks that point at rewritten commits.
    //
    // A bookmark on an abandoned commit is deleted when
    // `options.delete_abandoned_bookmarks` is set; otherwise the bookmark is
    // merged from its old target to the new target(s).
    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) {
        // Collect first since the loop below mutates the view.
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                // NOTE(review): the old id is interspersed between the new
                // ids — presumably to form the adds/removes alternation that
                // RefTarget::from_merge expects; confirm against Merge docs.
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target);
        }
    }
1114
    // Repoints working-copy commits whose commit was rewritten.
    //
    // For a plainly rewritten commit the workspace is moved to the first
    // replacement. For an abandoned commit a fresh working-copy commit is
    // created on top of the (merged) new parents, and shared between
    // workspaces that were on the same abandoned commit.
    fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(workspace_id, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((workspace_id.to_owned(), change))
            })
            .collect_vec();
        // Cache so multiple workspaces on one abandoned commit share the
        // newly created commit.
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (workspace_id, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                // Rewritten (possibly divergent): use the first replacement.
                self.store().get_commit(&new_commit_ids[0])?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                // Abandoned: create a new commit on the merged new parents.
                let new_commits: Vec<_> = new_commit_ids
                    .iter()
                    .map(|id| self.store().get_commit(id))
                    .try_collect()?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits)?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree.id().clone())
                    .write()?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            // NOTE(review): edit() failure is treated as a bug (unwrap) —
            // confirm it cannot fail for an existing workspace id.
            self.edit(workspace_id, &new_wc_commit).unwrap();
        }
        Ok(())
    }
1155
1156 fn update_heads(&mut self) {
1157 let old_commits_expression =
1158 RevsetExpression::commits(self.parent_mapping.keys().cloned().collect());
1159 let heads_to_add_expression = old_commits_expression
1160 .parents()
1161 .minus(&old_commits_expression);
1162 let heads_to_add = heads_to_add_expression
1163 .evaluate(self)
1164 .unwrap()
1165 .iter()
1166 .map(Result::unwrap); let mut view = self.view().store_view().clone();
1169 for commit_id in self.parent_mapping.keys() {
1170 view.head_ids.remove(commit_id);
1171 }
1172 view.head_ids.extend(heads_to_add);
1173 self.set_view(view);
1174 }
1175
    /// Finds the descendants of `roots` that need rebasing, excluding
    /// commits that were themselves rewritten (i.e. keys of
    /// `parent_mapping`).
    ///
    /// The result is topologically ordered such that popping commits off the
    /// end of the vector yields ancestors before descendants, which is the
    /// order rebasing requires.
    fn find_descendants_to_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let store = self.store();
        let to_visit_expression =
            RevsetExpression::commits(roots)
                .descendants()
                .minus(&RevsetExpression::commits(
                    self.parent_mapping.keys().cloned().collect(),
                ));
        let to_visit_revset = to_visit_expression
            .evaluate(self)
            .map_err(|err| err.expect_backend_error())?;
        let to_visit: Vec<_> = to_visit_revset
            .iter()
            .commits(store)
            .try_collect()
            .map_err(|err| err.expect_backend_error())?;
        // Release the revset's borrow of `self` before walking the DAG.
        drop(to_visit_revset);
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                for parent in commit.parents() {
                    let Ok(parent) = parent else {
                        // Propagate the backend error through the walk.
                        dependents.push(parent);
                        continue;
                    };
                    // If a parent was rewritten, this commit must also be
                    // ordered after the *new* parents that will replace it.
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
        )
    }
1229
    /// Applies `callback` to every descendant of `roots` that needs
    /// rebasing, using default ref-rewriting options.
    ///
    /// See [`MutableRepo::transform_descendants_with_options`].
    pub fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &options, callback)
    }
1249
1250 pub fn transform_descendants_with_options(
1254 &mut self,
1255 roots: Vec<CommitId>,
1256 options: &RewriteRefsOptions,
1257 mut callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
1258 ) -> BackendResult<()> {
1259 let mut to_visit = self.find_descendants_to_rebase(roots)?;
1260 while let Some(old_commit) = to_visit.pop() {
1261 let new_parent_ids = self.new_parents(old_commit.parent_ids());
1262 let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
1263 callback(rewriter)?;
1264 }
1265 self.update_rewritten_references(options)?;
1266 Ok(())
1276 }
1277
1278 pub fn rebase_descendants_with_options(
1294 &mut self,
1295 options: &RebaseOptions,
1296 mut progress: impl FnMut(Commit, RebasedCommit),
1297 ) -> BackendResult<()> {
1298 let roots = self.parent_mapping.keys().cloned().collect();
1299 self.transform_descendants_with_options(roots, &options.rewrite_refs, |rewriter| {
1300 if rewriter.parents_changed() {
1301 let old_commit = rewriter.old_commit().clone();
1302 let rebased_commit = rebase_commit_with_options(rewriter, options)?;
1303 progress(old_commit, rebased_commit);
1304 }
1305 Ok(())
1306 })?;
1307 self.parent_mapping.clear();
1308 Ok(())
1309 }
1310
1311 pub fn rebase_descendants(&mut self) -> BackendResult<usize> {
1321 let options = RebaseOptions::default();
1322 let mut num_rebased = 0;
1323 self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
1324 num_rebased += 1;
1325 })?;
1326 Ok(num_rebased)
1327 }
1328
1329 pub fn reparent_descendants(&mut self) -> BackendResult<usize> {
1336 let roots = self.parent_mapping.keys().cloned().collect_vec();
1337 let mut num_reparented = 0;
1338 self.transform_descendants(roots, |rewriter| {
1339 if rewriter.parents_changed() {
1340 let builder = rewriter.reparent();
1341 builder.write()?;
1342 num_reparented += 1;
1343 }
1344 Ok(())
1345 })?;
1346 self.parent_mapping.clear();
1347 Ok(num_reparented)
1348 }
1349
1350 pub fn set_wc_commit(
1351 &mut self,
1352 workspace_id: WorkspaceId,
1353 commit_id: CommitId,
1354 ) -> Result<(), RewriteRootCommit> {
1355 if &commit_id == self.store().root_commit_id() {
1356 return Err(RewriteRootCommit);
1357 }
1358 self.view_mut().set_wc_commit(workspace_id, commit_id);
1359 Ok(())
1360 }
1361
    /// Removes the working-copy commit association for `workspace_id`,
    /// abandoning the old working-copy commit first if it's discardable.
    pub fn remove_wc_commit(&mut self, workspace_id: &WorkspaceId) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(workspace_id)?;
        self.view_mut().remove_wc_commit(workspace_id);
        Ok(())
    }
1367
    /// Renames the workspace `old_workspace_id` to `new_workspace_id` in the
    /// view.
    pub fn rename_workspace(
        &mut self,
        old_workspace_id: &WorkspaceId,
        new_workspace_id: WorkspaceId,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut()
            .rename_workspace(old_workspace_id, new_workspace_id)
    }
1376
1377 pub fn check_out(
1378 &mut self,
1379 workspace_id: WorkspaceId,
1380 commit: &Commit,
1381 ) -> Result<Commit, CheckOutCommitError> {
1382 let wc_commit = self
1383 .new_commit(vec![commit.id().clone()], commit.tree_id().clone())
1384 .write()?;
1385 self.edit(workspace_id, &wc_commit)?;
1386 Ok(wc_commit)
1387 }
1388
1389 pub fn edit(
1390 &mut self,
1391 workspace_id: WorkspaceId,
1392 commit: &Commit,
1393 ) -> Result<(), EditCommitError> {
1394 self.maybe_abandon_wc_commit(&workspace_id)?;
1395 self.add_head(commit)?;
1396 Ok(self.set_wc_commit(workspace_id, commit.id().clone())?)
1397 }
1398
    /// Abandons the current working-copy commit of `workspace_id` if it is
    /// discardable, not referenced by another workspace or a local bookmark,
    /// and still a visible head.
    fn maybe_abandon_wc_commit(
        &mut self,
        workspace_id: &WorkspaceId,
    ) -> Result<(), EditCommitError> {
        // True if another workspace or any local bookmark points at the
        // commit; such a commit must not be abandoned.
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(ws_id, _)| ws_id != workspace_id)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_id).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            // Only abandon while the commit is still a head, i.e. it has no
            // visible descendants that would be orphaned.
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }
1438
    /// Normalizes the head set of `view`:
    /// - an empty head set becomes `{root}`;
    /// - otherwise the root commit id is dropped and the remaining ids are
    ///   reduced (via the index) to actual heads, i.e. no id that is an
    ///   ancestor of another remains.
    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            view.head_ids.remove(root_commit_id);
            // Let the index filter out ids that are ancestors of other heads.
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }
1460
    /// Makes `head` a view head, indexing it and any missing ancestors.
    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }
1466
    /// Adds the given commits as view heads, indexing any commits (and their
    /// ancestors) that the index doesn't know about yet.
    ///
    /// The common single-head case where every parent is already a head
    /// takes a fast path that updates the head set directly and keeps the
    /// view clean; otherwise the view is marked dirty so head invariants are
    /// re-enforced lazily on the next read.
    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                // Fast path: the new head simply replaces its parents in the
                // head set, so no invariant enforcement is needed.
                self.index.add_commit(head);
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                // Walk back from the heads collecting commits missing from
                // the index; iterating the result in reverse below yields
                // ancestors before descendants, as the index requires.
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter(|id| !self.index().has_id(id))
                            .map(|id| self.store().get_commit(id))
                            .map_ok(CommitByCommitterTimestamp)
                            .collect_vec()
                    },
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index.add_commit(missing_commit);
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }
1521
    /// Removes `head` from the view's head set and marks the view dirty so
    /// invariants are re-enforced on the next read.
    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }
1526
    /// Returns the current target of the local bookmark `name`.
    pub fn get_local_bookmark(&self, name: &str) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }
1530
    /// Sets the local bookmark `name` to `target`, adding the target's added
    /// ids as view heads so they stay visible.
    pub fn set_local_bookmark_target(&mut self, name: &str, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
    }
1538
    /// Three-way merges the change from `base_target` to `other_target` into
    /// the local bookmark's current target.
    pub fn merge_local_bookmark(
        &mut self,
        name: &str,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        // Goes through set_local_bookmark_target so new target ids become
        // view heads.
        self.set_local_bookmark_target(name, new_target);
    }
1551
    /// Returns the remote-tracking ref for the given remote bookmark symbol.
    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }
1556
    /// Sets the remote-tracking ref for the given remote bookmark symbol.
    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }
1560
    /// Three-way merges the change from `base_ref` to `other_ref` into the
    /// remote bookmark's current ref (target and tracking state).
    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref);
        view.set_remote_bookmark(symbol, new_ref);
    }
1573
    /// Starts tracking the remote bookmark: merges its target into the local
    /// bookmark of the same name and marks the remote ref as tracking.
    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        // Merge relative to the ref's current tracking target so already
        // tracked changes aren't re-applied.
        let base_target = remote_ref.tracking_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target);
        remote_ref.state = RemoteRefState::Tracking;
        self.set_remote_bookmark(symbol, remote_ref);
    }
1583
    /// Stops tracking the remote bookmark; the local bookmark is left as-is.
    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }
1590
    /// Removes all refs belonging to the named remote from the view.
    pub fn remove_remote(&mut self, remote_name: &str) {
        self.view_mut().remove_remote(remote_name);
    }
1594
    /// Renames the remote `old` to `new` in the view.
    pub fn rename_remote(&mut self, old: &str, new: &str) {
        self.view_mut().rename_remote(old, new);
    }
1598
    /// Returns the current target of the tag `name`.
    pub fn get_tag(&self, name: &str) -> RefTarget {
        self.view.with_ref(|v| v.get_tag(name).clone())
    }
1602
    /// Sets the tag `name` to `target`.
    pub fn set_tag_target(&mut self, name: &str, target: RefTarget) {
        self.view_mut().set_tag_target(name, target);
    }
1606
    /// Three-way merges the change from `base_target` to `other_target` into
    /// the tag's current target.
    pub fn merge_tag(&mut self, name: &str, base_target: &RefTarget, other_target: &RefTarget) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_tag_target(name, new_target);
    }
1614
    /// Returns the current target of the Git ref `name`.
    pub fn get_git_ref(&self, name: &str) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }
1618
    /// Sets the Git ref `name` to `target`.
    pub fn set_git_ref_target(&mut self, name: &str, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }
1622
    /// Three-way merges the change from `base_target` to `other_target` into
    /// the Git ref's current target.
    fn merge_git_ref(&mut self, name: &str, base_target: &RefTarget, other_target: &RefTarget) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_git_ref_target(name, new_target);
    }
1630
    /// Returns the target of Git's HEAD as recorded in the view.
    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }
1634
    /// Sets the recorded target of Git's HEAD.
    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }
1638
    /// Replaces the entire view with `data`; head invariants are re-enforced
    /// lazily on the next read.
    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }
1643
    /// Merges the changes made between `base_repo` and `other_repo` into
    /// this mutable repo (index and view).
    pub fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> BackendResult<()> {
        // Merge the indices first so commits referenced by either view are
        // known when merging the views.
        self.index.merge_in(base_repo.readonly_index());
        self.index.merge_in(other_repo.readonly_index());

        // Resolve any pending (dirty) view state before merging, then mark
        // dirty again so invariants are re-enforced after the merge.
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view)?;
        self.view.mark_dirty();
        Ok(())
    }
1661
    /// Merges `other_repo`'s readonly index into this repo's mutable index.
    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) {
        self.index.merge_in(other_repo.readonly_index());
    }
1665
    /// Three-way merges `other`'s view into this repo's view, with `base` as
    /// the common ancestor: working-copy commits, heads, bookmarks, tags,
    /// Git refs, and Git HEAD.
    fn merge_view(&mut self, base: &View, other: &View) -> BackendResult<()> {
        // Merge working-copy commit assignments for workspaces known to the
        // base view.
        for (workspace_id, base_wc_commit) in base.wc_commit_ids() {
            let self_wc_commit = self.view().get_wc_commit_id(workspace_id);
            let other_wc_commit = other.get_wc_commit_id(workspace_id);
            if other_wc_commit == Some(base_wc_commit) || other_wc_commit == self_wc_commit {
                // The other side made no change (or the same change); keep
                // our value.
            } else if let Some(other_wc_commit) = other_wc_commit {
                if self_wc_commit == Some(base_wc_commit) {
                    // Only the other side changed the working-copy commit.
                    self.view_mut()
                        .set_wc_commit(workspace_id.clone(), other_wc_commit.clone());
                }
            } else {
                // The other side removed the workspace; removal wins.
                self.view_mut().remove_wc_commit(workspace_id);
            }
        }
        // Adopt workspaces the other side added that neither base nor we
        // know about.
        for (workspace_id, other_wc_commit) in other.wc_commit_ids() {
            if self.view().get_wc_commit_id(workspace_id).is_none()
                && base.get_wc_commit_id(workspace_id).is_none()
            {
                self.view_mut()
                    .set_wc_commit(workspace_id.clone(), other_wc_commit.clone());
            }
        }
        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

        if self.index.as_any().is::<DefaultMutableIndex>() {
            // With the default index we can match removed/added commits by
            // change id and record them as rewrites/abandons, so hidden
            // commits are handled via the rewrite machinery.
            self.record_rewrites(&base_heads, &own_heads)?;
            self.record_rewrites(&base_heads, &other_heads)?;
        } else {
            // Without change-id tracking, fall back to removing the heads
            // that disappeared between base and other.
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        // Three-way merge each category of refs that changed between base
        // and other.
        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target);
        }

        let changed_tags = diff_named_ref_targets(base.tags(), other.tags());
        for (name, (base_target, other_target)) in changed_tags {
            self.merge_tag(name, base_target, other_target);
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target);
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref);
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        );
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }
1750
    /// Finds commits that were removed between `old_heads` and `new_heads`
    /// and records them in `parent_mapping`: a removed commit whose change
    /// id reappears among the added commits is a rewrite (divergent if it
    /// reappears more than once); one whose change id disappeared entirely
    /// is recorded as abandoned.
    fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        // Change ids of commits reachable from old_heads but not new_heads.
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.expect_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.expect_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        // Walk the added side (new_heads minus old_heads) and match change
        // ids against the removed commits.
        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.expect_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.expect_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                // Multiple new commits share the change id: divergence.
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        // Removed commits whose change id never reappeared were abandoned.
        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit(id)?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
1812}
1813
impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    // Store-level accessors all delegate to the base (readonly) repo.
    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        // Lazily re-enforce head invariants if the view is dirty.
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        // Scope the change-id index to the currently visible heads.
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}
1850
/// Error from attempting to check out or otherwise rewrite the root commit,
/// which is immutable.
#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;
1855
/// Error from editing (checking out) a commit in a workspace.
#[derive(Debug, Error)]
pub enum EditCommitError {
    /// The recorded working-copy commit could not be loaded from the store.
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    /// Attempted to edit the immutable root commit.
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    /// Other backend failure while editing.
    #[error(transparent)]
    BackendError(#[from] BackendError),
}
1866
/// Error from checking out a commit as a new working-copy commit.
#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    /// Writing the new working-copy commit failed.
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    /// Updating the workspace to point at the new commit failed.
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}
1875
mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

    /// Cell holding a value that is either "clean" or "dirty".
    ///
    /// A clean value can be borrowed directly; a dirty value must first be
    /// fixed up by a caller-supplied function. Exactly one of the two slots
    /// holds the value at any time; boxing makes moving it between slots
    /// cheap.
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        /// Creates a cell whose value starts out clean.
        pub fn with_clean(value: T) -> Self {
            DirtyCell {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        /// Returns the clean value, first running `f` to clean it up if the
        /// cell is currently dirty.
        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                // Not clean yet, so the value must be in the dirty slot.
                let mut boxed = self.dirty.borrow_mut().take().unwrap();
                f(&mut boxed);
                boxed
            })
        }

        /// Cleans the value in place (if dirty) without returning it.
        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        /// Consumes the cell and returns the value, clean or dirty.
        pub fn into_inner(self) -> T {
            let boxed = self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap();
            *boxed
        }

        /// Calls `f` with a shared reference to the value, whether it is
        /// clean or dirty.
        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            match self.clean.get() {
                Some(value) => f(value),
                None => f(self.dirty.borrow().as_ref().unwrap()),
            }
        }

        /// Returns a mutable reference to the value, clean or dirty.
        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        /// Marks the value dirty by moving it from the clean slot to the
        /// dirty slot (no-op if it is already dirty).
        pub fn mark_dirty(&mut self) {
            if let Some(boxed) = self.clean.take() {
                *self.dirty.get_mut() = Some(boxed);
            }
        }
    }
}