#![allow(missing_docs)]

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fs;
use std::path::Path;
use std::slice;
use std::sync::Arc;

use itertools::Itertools as _;
use once_cell::sync::OnceCell;
use thiserror::Error;
use tracing::instrument;

use self::dirty_cell::DirtyCell;
use crate::backend::Backend;
use crate::backend::BackendError;
use crate::backend::BackendInitError;
use crate::backend::BackendLoadError;
use crate::backend::BackendResult;
use crate::backend::ChangeId;
use crate::backend::CommitId;
use crate::backend::MergedTreeId;
use crate::commit::Commit;
use crate::commit::CommitByCommitterTimestamp;
use crate::commit_builder::CommitBuilder;
use crate::commit_builder::DetachedCommitBuilder;
use crate::dag_walk;
use crate::default_index::DefaultIndexStore;
use crate::default_index::DefaultMutableIndex;
use crate::default_submodule_store::DefaultSubmoduleStore;
use crate::file_util::IoResultExt as _;
use crate::file_util::PathError;
use crate::index::ChangeIdIndex;
use crate::index::Index;
use crate::index::IndexReadError;
use crate::index::IndexStore;
use crate::index::MutableIndex;
use crate::index::ReadonlyIndex;
use crate::merge::MergeBuilder;
use crate::object_id::HexPrefix;
use crate::object_id::ObjectId as _;
use crate::object_id::PrefixResolution;
use crate::op_heads_store;
use crate::op_heads_store::OpHeadResolutionError;
use crate::op_heads_store::OpHeadsStore;
use crate::op_heads_store::OpHeadsStoreError;
use crate::op_store;
use crate::op_store::OpStore;
use crate::op_store::OpStoreError;
use crate::op_store::OpStoreResult;
use crate::op_store::OperationId;
use crate::op_store::RefTarget;
use crate::op_store::RemoteRef;
use crate::op_store::RemoteRefState;
use crate::op_store::RootOperationData;
use crate::operation::Operation;
use crate::ref_name::GitRefName;
use crate::ref_name::RefName;
use crate::ref_name::RemoteName;
use crate::ref_name::RemoteRefSymbol;
use crate::ref_name::WorkspaceName;
use crate::ref_name::WorkspaceNameBuf;
use crate::refs::diff_named_ref_targets;
use crate::refs::diff_named_remote_refs;
use crate::refs::merge_ref_targets;
use crate::refs::merge_remote_refs;
use crate::revset;
use crate::revset::RevsetExpression;
use crate::revset::RevsetIteratorExt as _;
use crate::rewrite::merge_commit_trees;
use crate::rewrite::rebase_commit_with_options;
use crate::rewrite::CommitRewriter;
use crate::rewrite::RebaseOptions;
use crate::rewrite::RebasedCommit;
use crate::rewrite::RewriteRefsOptions;
use crate::settings::UserSettings;
use crate::signing::SignInitError;
use crate::signing::Signer;
use crate::simple_backend::SimpleBackend;
use crate::simple_op_heads_store::SimpleOpHeadsStore;
use crate::simple_op_store::SimpleOpStore;
use crate::store::Store;
use crate::submodule_store::SubmoduleStore;
use crate::transaction::Transaction;
use crate::view::RenameWorkspaceError;
use crate::view::View;

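/// Read-only access to the state of a repository: its commit store, operation
/// store, index, and view. Implemented by both `ReadonlyRepo` and
/// `MutableRepo`.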
pub trait Repo {
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

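    /// Resolves a complete change id to the commits it is associated with.
    ///
    /// Returns `None` if the change id doesn't exist. Panics on an ambiguous
    /// match, which a complete change id should never produce.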
    fn resolve_change_id(&self, change_id: &ChangeId) -> Option<Vec<CommitId>> {
        let prefix = HexPrefix::from_bytes(change_id.as_bytes());
        match self.resolve_change_id_prefix(&prefix) {
            PrefixResolution::NoMatch => None,
            PrefixResolution::SingleMatch(entries) => Some(entries),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>>;

    fn shortest_unique_change_id_prefix_len(&self, target_id_bytes: &ChangeId) -> usize;
}

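/// A repository loaded at a specific operation, providing an immutable view
/// of its commits and refs.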
pub struct ReadonlyRepo {
    loader: RepoLoader,
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}

impl Debug for ReadonlyRepo {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_struct("ReadonlyRepo")
            .field("store", &self.loader.store)
            .finish_non_exhaustive()
    }
}

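/// Error from initializing a new repository on disk.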
#[derive(Error, Debug)]
pub enum RepoInitError {
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    #[error(transparent)]
    Path(#[from] PathError),
}

impl ReadonlyRepo {
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }

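    /// Initializes a new repository on disk. Each store gets its own
    /// subdirectory under `repo_path` (`store`, `op_store`, `op_heads`,
    /// `index`, `submodule_store`), with a `type` file recording which
    /// backend implementation it uses so `StoreFactories` can load it later.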
    #[expect(clippy::too_many_arguments)]
    pub fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer,
        op_heads_store_initializer: &OpHeadsStoreInitializer,
        index_store_initializer: &IndexStoreInitializer,
        submodule_store_initializer: &SubmoduleStoreInitializer,
    ) -> Result<Arc<ReadonlyRepo>, RepoInitError> {
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let store = Store::new(backend, signer);

        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store.update_op_heads(&[], op_store.root_operation_id())?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        let root_operation = loader.root_operation();
        let root_view = root_operation.view().expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(ReadonlyRepo {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

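    /// Starts a new transaction based on this repo's view and index.
    ///
    /// A minimal sketch of the usual flow, mirroring what
    /// `RepoLoader::merge_operations` below does (the description string is
    /// illustrative):
    ///
    /// ```ignore
    /// let mut tx = repo.start_transaction();
    /// tx.repo_mut().rebase_descendants()?;
    /// let new_repo = tx.write("my operation".to_string()).leave_unpublished();
    /// ```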
    pub fn start_transaction(self: &Arc<ReadonlyRepo>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    pub fn reload_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at_head()
    }

    #[instrument]
    pub fn reload_at(&self, operation: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at(operation)
    }
}

impl Repo for ReadonlyRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}

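// The `*Initializer` aliases below are the injectable constructors used when
// creating a repo; the boxed `*Factory` types are their load-time
// counterparts, registered by name in `StoreFactories`.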
pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip]
pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;

type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;

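/// Merges `ext` into `base`. Panics if both maps define a factory with the
/// same name.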
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        match base.entry(name) {
            Entry::Vacant(v) => {
                v.insert(factory);
            }
            Entry::Occupied(o) => {
                panic!("Conflicting factory definitions for '{}' factory", o.key())
            }
        }
    }
}

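/// Registry of factory functions for loading each store of a repo, keyed by
/// the backend name recorded in the store's `type` file.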
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}

impl Default for StoreFactories {
    fn default() -> Self {
        let mut factories = StoreFactories::empty();

        factories.add_backend(
            SimpleBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}

#[derive(Debug, Error)]
pub enum StoreLoadError {
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    #[error(transparent)]
    Signing(#[from] SignInitError),
}

impl StoreFactories {
    pub fn empty() -> Self {
        StoreFactories {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    pub fn merge(&mut self, ext: StoreFactories) {
        let StoreFactories {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.to_string(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.to_string(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.to_string(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.to_string(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.to_string(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}

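/// Reads the backend name recorded in a store's `type` file.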
pub fn read_store_type(
    store: &'static str,
    path: impl AsRef<Path>,
) -> Result<String, StoreLoadError> {
    let path = path.as_ref();
    fs::read_to_string(path)
        .context(path)
        .map_err(|source| StoreLoadError::ReadError { store, source })
}

#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    IndexRead(#[from] IndexReadError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
}

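/// Loads the stores of an existing repo and creates `ReadonlyRepo` instances
/// at the head operation or at an arbitrary operation.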
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}

impl RepoLoader {
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
        );
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    pub fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self._resolve_op_heads(op_heads),
        )?;
        let view = op.view()?;
        self._finish_load(op, view)
    }

    #[instrument(skip(self))]
    pub fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view()?;
        self._finish_load(op.clone(), view)
    }

    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

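    /// Returns the root operation, from which all other operations descend.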
    pub fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .expect("failed to read root operation")
    }

    pub fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id)?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }

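    /// Merges the given operations into a single operation by merging them
    /// one by one in a transaction, rebasing descendants after each step.
    ///
    /// Returns the root operation if `operations` is empty, and the operation
    /// itself if there is exactly one.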
    pub fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation());
        };
        let final_op = if num_operations > 1 {
            let base_repo = self.load_at(&base_op)?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op)?;
                tx.repo_mut().rebase_descendants()?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description).leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    fn _resolve_op_heads(&self, op_heads: Vec<Operation>) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
    }

    fn _finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self.index_store.get_index_at_op(&operation, &self.store)?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}

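/// How a commit was rewritten: to a single new commit, to several (divergent)
/// commits, or abandoned. For an abandoned commit, the recorded ids are the
/// parents that its descendants should be rebased onto instead.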
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    Rewritten(CommitId),
    Divergent(Vec<CommitId>),
    Abandoned(Vec<CommitId>),
}

impl Rewrite {
    fn new_parent_ids(&self) -> &[CommitId] {
        match self {
            Rewrite::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
            Rewrite::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
            Rewrite::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
        }
    }
}

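/// A mutable state of a repository, built on top of a `ReadonlyRepo`. Changes
/// are made in memory and become visible to others when the enclosing
/// `Transaction` is committed as a new operation.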
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    view: DirtyCell<View>,
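    /// Maps each rewritten or abandoned commit id to its `Rewrite` record,
    /// used to compute new parents for descendants and to update refs.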
    parent_mapping: HashMap<CommitId, Rewrite>,
}

impl MutableRepo {
    pub fn new(
        base_repo: Arc<ReadonlyRepo>,
        index: &dyn ReadonlyIndex,
        view: &View,
    ) -> MutableRepo {
        let mut_view = view.clone();
        let mut_index = index.start_modification();
        MutableRepo {
            base_repo,
            index: mut_index,
            view: DirtyCell::with_clean(mut_view),
            parent_mapping: Default::default(),
        }
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }

    pub fn has_changes(&self) -> bool {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        !(self.parent_mapping.is_empty() && self.view() == &self.base_repo.view)
    }

    pub(crate) fn consume(self) -> (Box<dyn MutableIndex>, View) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner())
    }

    pub fn new_commit(&mut self, parents: Vec<CommitId>, tree_id: MergedTreeId) -> CommitBuilder {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree_id).attach(self)
    }

    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }

    pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping
            .insert(old_id, Rewrite::Rewritten(new_id));
    }

    pub fn set_divergent_rewrite(
        &mut self,
        old_id: CommitId,
        new_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id.clone(),
            Rewrite::Divergent(new_ids.into_iter().collect()),
        );
    }

    pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
        assert_ne!(old_commit.id(), self.store().root_commit_id());
        self.record_abandoned_commit_with_parents(
            old_commit.id().clone(),
            old_commit.parent_ids().iter().cloned(),
        );
    }

    pub fn record_abandoned_commit_with_parents(
        &mut self,
        old_id: CommitId,
        new_parent_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id,
            Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
        );
    }

    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }

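    /// Calculates the new parents to replace `old_ids` with, following the
    /// parent mapping transitively. Commits rewritten into divergent commits
    /// are not followed, since there is no single obvious replacement.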
    pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
        self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
    }

    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new ids become empty because of cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }

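    /// Fully resolves the parent mapping (restricted to entries matching
    /// `predicate`) into a map from old commit id to new commit ids,
    /// following chains of rewrites. The mapping is expected to be acyclic.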
    fn resolve_rewrite_mapping_with(
        &self,
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> HashMap<CommitId, Vec<CommitId>> {
        let sorted_ids = dag_walk::topo_order_forward(
            self.parent_mapping.keys(),
            |&id| id,
            |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => &[],
                Some(rewrite) => rewrite.new_parent_ids(),
            },
        );
        let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for old_id in sorted_ids {
            let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
                continue;
            };
            let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
            let new_ids = match rewrite.new_parent_ids() {
                [id] => lookup(id).to_vec(),
                ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
            };
            debug_assert_eq!(
                new_ids,
                self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
            );
            new_mapping.insert(old_id.clone(), new_ids);
        }
        new_mapping
    }

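    /// Updates bookmarks, working-copy commits, and visible heads to reflect
    /// the rewrites and abandonments recorded in the parent mapping.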
    pub fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options)?;
        self.update_heads();
        Ok(())
    }

    fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true);
        self.update_local_bookmarks(&rewrite_mapping, options);
        self.update_wc_commits(&rewrite_mapping)?;
        Ok(())
    }

    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) {
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target);
        }
    }

    fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(name, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((name.to_owned(), change))
            })
            .collect_vec();
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (name, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                self.store().get_commit(&new_commit_ids[0])?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                let new_commits: Vec<_> = new_commit_ids
                    .iter()
                    .map(|id| self.store().get_commit(id))
                    .try_collect()?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits)?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree.id().clone())
                    .write()?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            self.edit(name, &new_wc_commit).unwrap();
        }
        Ok(())
    }

    fn update_heads(&mut self) {
        let old_commits_expression =
            RevsetExpression::commits(self.parent_mapping.keys().cloned().collect());
        let heads_to_add_expression = old_commits_expression
            .parents()
            .minus(&old_commits_expression);
        let heads_to_add = heads_to_add_expression
            .evaluate(self)
            .unwrap()
            .iter()
            .map(Result::unwrap);

        let mut view = self.view().store_view().clone();
        for commit_id in self.parent_mapping.keys() {
            view.head_ids.remove(commit_id);
        }
        view.head_ids.extend(heads_to_add);
        self.set_view(view);
    }

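    /// Finds descendants of `roots` that need to be rebased, in reverse
    /// topological order (children first), so that popping from the end of
    /// the returned vector visits ancestors before their descendants.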
    fn find_descendants_to_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let store = self.store();
        let to_visit_expression =
            RevsetExpression::commits(roots)
                .descendants()
                .minus(&RevsetExpression::commits(
                    self.parent_mapping.keys().cloned().collect(),
                ));
        let to_visit_revset = to_visit_expression
            .evaluate(self)
            .map_err(|err| err.expect_backend_error())?;
        let to_visit: Vec<_> = to_visit_revset
            .iter()
            .commits(store)
            .try_collect()
            .map_err(|err| err.expect_backend_error())?;
        drop(to_visit_revset);
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                for parent in commit.parents() {
                    let Ok(parent) = parent else {
                        dependents.push(parent);
                        continue;
                    };
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
        )
    }

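    /// Applies `callback` to every descendant of the rewritten commits, in
    /// an order where ancestors are visited before descendants. Each callback
    /// invocation gets a `CommitRewriter` preloaded with the commit's new
    /// parents; refs are updated afterwards.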
    pub fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &options, callback)
    }

    pub fn transform_descendants_with_options(
        &mut self,
        roots: Vec<CommitId>,
        options: &RewriteRefsOptions,
        mut callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let mut to_visit = self.find_descendants_to_rebase(roots)?;
        while let Some(old_commit) = to_visit.pop() {
            let new_parent_ids = self.new_parents(old_commit.parent_ids());
            let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
            callback(rewriter)?;
        }
        self.update_rewritten_references(options)?;
        Ok(())
    }

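    /// Rebases descendants of all rewritten or abandoned commits onto their
    /// new parents, invoking `progress` for each commit that was actually
    /// rebased. Clears the parent mapping when done.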
    pub fn rebase_descendants_with_options(
        &mut self,
        options: &RebaseOptions,
        mut progress: impl FnMut(Commit, RebasedCommit),
    ) -> BackendResult<()> {
        let roots = self.parent_mapping.keys().cloned().collect();
        self.transform_descendants_with_options(roots, &options.rewrite_refs, |rewriter| {
            if rewriter.parents_changed() {
                let old_commit = rewriter.old_commit().clone();
                let rebased_commit = rebase_commit_with_options(rewriter, options)?;
                progress(old_commit, rebased_commit);
            }
            Ok(())
        })?;
        self.parent_mapping.clear();
        Ok(())
    }

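    /// Rebases descendants with default options and returns the number of
    /// commits that were rebased.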
    pub fn rebase_descendants(&mut self) -> BackendResult<usize> {
        let options = RebaseOptions::default();
        let mut num_rebased = 0;
        self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
            num_rebased += 1;
        })?;
        Ok(num_rebased)
    }

    pub fn reparent_descendants(&mut self) -> BackendResult<usize> {
        let roots = self.parent_mapping.keys().cloned().collect_vec();
        let mut num_reparented = 0;
        self.transform_descendants(roots, |rewriter| {
            if rewriter.parents_changed() {
                let builder = rewriter.reparent();
                builder.write()?;
                num_reparented += 1;
            }
            Ok(())
        })?;
        self.parent_mapping.clear();
        Ok(num_reparented)
    }

    pub fn set_wc_commit(
        &mut self,
        name: WorkspaceNameBuf,
        commit_id: CommitId,
    ) -> Result<(), RewriteRootCommit> {
        if &commit_id == self.store().root_commit_id() {
            return Err(RewriteRootCommit);
        }
        self.view_mut().set_wc_commit(name, commit_id);
        Ok(())
    }

    pub fn remove_wc_commit(&mut self, name: &WorkspaceName) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(name)?;
        self.view_mut().remove_wc_commit(name);
        Ok(())
    }

    pub fn rename_workspace(
        &mut self,
        old_name: &WorkspaceName,
        new_name: WorkspaceNameBuf,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut().rename_workspace(old_name, new_name)
    }

    pub fn check_out(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<Commit, CheckOutCommitError> {
        let wc_commit = self
            .new_commit(vec![commit.id().clone()], commit.tree_id().clone())
            .write()?;
        self.edit(name, &wc_commit)?;
        Ok(wc_commit)
    }

    pub fn edit(&mut self, name: WorkspaceNameBuf, commit: &Commit) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(&name)?;
        self.add_head(commit)?;
        Ok(self.set_wc_commit(name, commit.id().clone())?)
    }

    fn maybe_abandon_wc_commit(
        &mut self,
        workspace_name: &WorkspaceName,
    ) -> Result<(), EditCommitError> {
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(name, _)| name != workspace_name)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_name).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }

    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            view.head_ids.remove(root_commit_id);
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }

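    /// Makes `head` a visible head of the view, ensuring it and its ancestors
    /// are present in the index.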
    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }

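    /// Like `add_head()`, but for multiple heads. There is a fast path for a
    /// single head whose parents are already heads; otherwise missing
    /// ancestors are added to the index in topological order and the view is
    /// marked dirty so head invariants are re-established lazily.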
    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                self.index.add_commit(head);
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter(|id| !self.index().has_id(id))
                            .map(|id| self.store().get_commit(id))
                            .map_ok(CommitByCommitterTimestamp)
                            .collect_vec()
                    },
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index.add_commit(missing_commit);
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }

    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    pub fn get_local_bookmark(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }

    pub fn set_local_bookmark_target(&mut self, name: &RefName, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
    }

    pub fn merge_local_bookmark(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        self.set_local_bookmark_target(name, new_target);
    }

    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }

    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }

    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref);
        view.set_remote_bookmark(symbol, new_ref);
    }

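    /// Merges the remote bookmark into the local bookmark of the same name
    /// and marks the remote bookmark as tracked.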
    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        let base_target = remote_ref.tracked_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target);
        remote_ref.state = RemoteRefState::Tracked;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn remove_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().remove_remote(remote_name);
    }

    pub fn rename_remote(&mut self, old: &RemoteName, new: &RemoteName) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_tag(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_tag(name).clone())
    }

    pub fn set_tag_target(&mut self, name: &RefName, target: RefTarget) {
        self.view_mut().set_tag_target(name, target);
    }

    pub fn merge_tag(&mut self, name: &RefName, base_target: &RefTarget, other_target: &RefTarget) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_tag_target(name, new_target);
    }

    pub fn get_git_ref(&self, name: &GitRefName) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }

    pub fn set_git_ref_target(&mut self, name: &GitRefName, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }

    fn merge_git_ref(
        &mut self,
        name: &GitRefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_git_ref_target(name, new_target);
    }

    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }

    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }

    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }

    pub fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> BackendResult<()> {
        self.index.merge_in(base_repo.readonly_index());
        self.index.merge_in(other_repo.readonly_index());

        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view)?;
        self.view.mark_dirty();
        Ok(())
    }

    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) {
        self.index.merge_in(other_repo.readonly_index());
    }

    fn merge_view(&mut self, base: &View, other: &View) -> BackendResult<()> {
        for (name, base_wc_commit) in base.wc_commit_ids() {
            let self_wc_commit = self.view().get_wc_commit_id(name);
            let other_wc_commit = other.get_wc_commit_id(name);
            if other_wc_commit == Some(base_wc_commit) || other_wc_commit == self_wc_commit {
                // The other side didn't change, or both sides changed in the
                // same way.
            } else if let Some(other_wc_commit) = other_wc_commit {
                if self_wc_commit == Some(base_wc_commit) {
                    self.view_mut()
                        .set_wc_commit(name.clone(), other_wc_commit.clone());
                }
            } else {
                // The other side removed the workspace.
                self.view_mut().remove_wc_commit(name);
            }
        }
        for (name, other_wc_commit) in other.wc_commit_ids() {
            if self.view().get_wc_commit_id(name).is_none() && base.get_wc_commit_id(name).is_none()
            {
                // The other side added the workspace.
                self.view_mut()
                    .set_wc_commit(name.clone(), other_wc_commit.clone());
            }
        }
        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

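        // record_rewrites() walks revsets, which this code only attempts with
        // the default index backend. With other index backends, fall back to
        // reconciling the head sets directly.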
        if self.index.as_any().is::<DefaultMutableIndex>() {
            self.record_rewrites(&base_heads, &own_heads)?;
            self.record_rewrites(&base_heads, &other_heads)?;
        } else {
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target);
        }

        let changed_tags = diff_named_ref_targets(base.tags(), other.tags());
        for (name, (base_target, other_target)) in changed_tags {
            self.merge_tag(name, base_target, other_target);
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target);
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref);
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        );
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }

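    /// Finds commits that were rewritten or abandoned between `old_heads` and
    /// `new_heads` by comparing commits with the same change id on both
    /// sides, and records them in the parent mapping.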
    fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.expect_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.expect_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.expect_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.expect_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit(id)?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
}

impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}

#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;

#[derive(Debug, Error)]
pub enum EditCommitError {
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    #[error(transparent)]
    BackendError(#[from] BackendError),
}

#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}

mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

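    /// Cell holding a value whose invariants may temporarily be broken
    /// ("dirty") and are lazily re-established by a caller-provided cleanup
    /// function.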
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        // Exactly one of `clean` and `dirty` holds the value at any time.
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        pub fn with_clean(value: T) -> Self {
            DirtyCell {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                // The value must be in `dirty` because `clean` was unset.
                let mut value = self.dirty.borrow_mut().take().unwrap();
                f(&mut value);
                value
            })
        }

        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        pub fn into_inner(self) -> T {
            *self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap()
        }

        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            if let Some(value) = self.clean.get() {
                f(value)
            } else {
                f(self.dirty.borrow().as_ref().unwrap())
            }
        }

        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        pub fn mark_dirty(&mut self) {
            if let Some(value) = self.clean.take() {
                *self.dirty.get_mut() = Some(value);
            }
        }
    }
}