#![allow(missing_docs)]

use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fs;
use std::path::Path;
use std::slice;
use std::sync::Arc;

use itertools::Itertools as _;
use once_cell::sync::OnceCell;
use thiserror::Error;
use tracing::instrument;

use self::dirty_cell::DirtyCell;
use crate::backend::Backend;
use crate::backend::BackendError;
use crate::backend::BackendInitError;
use crate::backend::BackendLoadError;
use crate::backend::BackendResult;
use crate::backend::ChangeId;
use crate::backend::CommitId;
use crate::backend::MergedTreeId;
use crate::commit::Commit;
use crate::commit::CommitByCommitterTimestamp;
use crate::commit_builder::CommitBuilder;
use crate::commit_builder::DetachedCommitBuilder;
use crate::dag_walk;
use crate::default_index::DefaultIndexStore;
use crate::default_index::DefaultMutableIndex;
use crate::default_submodule_store::DefaultSubmoduleStore;
use crate::file_util::IoResultExt as _;
use crate::file_util::PathError;
use crate::index::ChangeIdIndex;
use crate::index::Index;
use crate::index::IndexReadError;
use crate::index::IndexStore;
use crate::index::MutableIndex;
use crate::index::ReadonlyIndex;
use crate::merge::trivial_merge;
use crate::merge::MergeBuilder;
use crate::object_id::HexPrefix;
use crate::object_id::PrefixResolution;
use crate::op_heads_store;
use crate::op_heads_store::OpHeadResolutionError;
use crate::op_heads_store::OpHeadsStore;
use crate::op_heads_store::OpHeadsStoreError;
use crate::op_store;
use crate::op_store::OpStore;
use crate::op_store::OpStoreError;
use crate::op_store::OpStoreResult;
use crate::op_store::OperationId;
use crate::op_store::RefTarget;
use crate::op_store::RemoteRef;
use crate::op_store::RemoteRefState;
use crate::op_store::RootOperationData;
use crate::operation::Operation;
use crate::ref_name::GitRefName;
use crate::ref_name::RefName;
use crate::ref_name::RemoteName;
use crate::ref_name::RemoteRefSymbol;
use crate::ref_name::WorkspaceName;
use crate::ref_name::WorkspaceNameBuf;
use crate::refs::diff_named_commit_ids;
use crate::refs::diff_named_ref_targets;
use crate::refs::diff_named_remote_refs;
use crate::refs::merge_ref_targets;
use crate::refs::merge_remote_refs;
use crate::revset;
use crate::revset::RevsetEvaluationError;
use crate::revset::RevsetExpression;
use crate::revset::RevsetIteratorExt as _;
use crate::rewrite::merge_commit_trees;
use crate::rewrite::rebase_commit_with_options;
use crate::rewrite::CommitRewriter;
use crate::rewrite::RebaseOptions;
use crate::rewrite::RebasedCommit;
use crate::rewrite::RewriteRefsOptions;
use crate::settings::UserSettings;
use crate::signing::SignInitError;
use crate::signing::Signer;
use crate::simple_backend::SimpleBackend;
use crate::simple_op_heads_store::SimpleOpHeadsStore;
use crate::simple_op_store::SimpleOpStore;
use crate::store::Store;
use crate::submodule_store::SubmoduleStore;
use crate::transaction::Transaction;
use crate::transaction::TransactionCommitError;
use crate::view::RenameWorkspaceError;
use crate::view::View;

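/// A read-only view of repository data: the commit store, operation store,
/// index, and current `View`. Implemented by both [`ReadonlyRepo`] and
/// [`MutableRepo`].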
pub trait Repo {
    fn base_repo(&self) -> &ReadonlyRepo;

    fn store(&self) -> &Arc<Store>;

    fn op_store(&self) -> &Arc<dyn OpStore>;

    fn index(&self) -> &dyn Index;

    fn view(&self) -> &View;

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore>;

    fn resolve_change_id(&self, change_id: &ChangeId) -> Option<Vec<CommitId>> {
        let prefix = HexPrefix::from_id(change_id);
        match self.resolve_change_id_prefix(&prefix) {
            PrefixResolution::NoMatch => None,
            PrefixResolution::SingleMatch(entries) => Some(entries),
            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
        }
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>>;

    fn shortest_unique_change_id_prefix_len(&self, target_id_bytes: &ChangeId) -> usize;
}

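/// A repository loaded at a specific operation, with an immutable view and
/// index. Use [`ReadonlyRepo::start_transaction()`] to make changes.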
pub struct ReadonlyRepo {
    loader: RepoLoader,
    operation: Operation,
    index: Box<dyn ReadonlyIndex>,
    change_id_index: OnceCell<Box<dyn ChangeIdIndex>>,
    view: View,
}

impl Debug for ReadonlyRepo {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        f.debug_struct("ReadonlyRepo")
            .field("store", &self.loader.store)
            .finish_non_exhaustive()
    }
}

#[derive(Error, Debug)]
pub enum RepoInitError {
    #[error(transparent)]
    Backend(#[from] BackendInitError),
    #[error(transparent)]
    OpHeadsStore(#[from] OpHeadsStoreError),
    #[error(transparent)]
    Path(#[from] PathError),
}

impl ReadonlyRepo {
    pub fn default_op_store_initializer() -> &'static OpStoreInitializer<'static> {
        &|_settings, store_path, root_data| {
            Ok(Box::new(SimpleOpStore::init(store_path, root_data)?))
        }
    }

    pub fn default_op_heads_store_initializer() -> &'static OpHeadsStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::init(store_path)?))
    }

    pub fn default_index_store_initializer() -> &'static IndexStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultIndexStore::init(store_path)?))
    }

    pub fn default_submodule_store_initializer() -> &'static SubmoduleStoreInitializer<'static> {
        &|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::init(store_path)))
    }

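    /// Initializes a new repository on disk, writing a `type` marker file for
    /// each store so it can be reloaded later. A minimal sketch of a call
    /// (the settings, path, and the `SimpleBackend::init` constructor used in
    /// the backend initializer are assumptions, not part of this file):
    ///
    /// ```ignore
    /// let repo = ReadonlyRepo::init(
    ///     &settings,
    ///     repo_path,
    ///     &|_settings, store_path| Ok(Box::new(SimpleBackend::init(store_path))),
    ///     Signer::from_settings(&settings)?,
    ///     ReadonlyRepo::default_op_store_initializer(),
    ///     ReadonlyRepo::default_op_heads_store_initializer(),
    ///     ReadonlyRepo::default_index_store_initializer(),
    ///     ReadonlyRepo::default_submodule_store_initializer(),
    /// )?;
    /// ```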
    #[expect(clippy::too_many_arguments)]
    pub fn init(
        settings: &UserSettings,
        repo_path: &Path,
        backend_initializer: &BackendInitializer,
        signer: Signer,
        op_store_initializer: &OpStoreInitializer,
        op_heads_store_initializer: &OpHeadsStoreInitializer,
        index_store_initializer: &IndexStoreInitializer,
        submodule_store_initializer: &SubmoduleStoreInitializer,
    ) -> Result<Arc<ReadonlyRepo>, RepoInitError> {
        let repo_path = dunce::canonicalize(repo_path).context(repo_path)?;

        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_initializer(settings, &store_path)?;
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let store = Store::new(backend, signer);

        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = op_store_initializer(settings, &op_store_path, root_op_data)?;
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store: Arc<dyn OpStore> = Arc::from(op_store);

        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let op_heads_store = op_heads_store_initializer(settings, &op_heads_path)?;
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        op_heads_store.update_op_heads(&[], op_store.root_operation_id())?;
        let op_heads_store: Arc<dyn OpHeadsStore> = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = index_store_initializer(settings, &index_path)?;
        let index_type_path = index_path.join("type");
        fs::write(&index_type_path, index_store.name()).context(&index_type_path)?;
        let index_store: Arc<dyn IndexStore> = Arc::from(index_store);

        let submodule_store_path = repo_path.join("submodule_store");
        fs::create_dir(&submodule_store_path).context(&submodule_store_path)?;
        let submodule_store = submodule_store_initializer(settings, &submodule_store_path)?;
        let submodule_store_type_path = submodule_store_path.join("type");
        fs::write(&submodule_store_type_path, submodule_store.name())
            .context(&submodule_store_type_path)?;
        let submodule_store = Arc::from(submodule_store);

        let loader = RepoLoader {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        };

        let root_operation = loader.root_operation();
        let root_view = root_operation.view().expect("failed to read root view");
        assert!(!root_view.heads().is_empty());
        let index = loader
            .index_store
            .get_index_at_op(&root_operation, &loader.store)
            .map_err(|err| BackendInitError(err.into()))?;
        Ok(Arc::new(ReadonlyRepo {
            loader,
            operation: root_operation,
            index,
            change_id_index: OnceCell::new(),
            view: root_view,
        }))
    }

    pub fn loader(&self) -> &RepoLoader {
        &self.loader
    }

    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    pub fn readonly_index(&self) -> &dyn ReadonlyIndex {
        self.index.as_ref()
    }

    fn change_id_index(&self) -> &dyn ChangeIdIndex {
        self.change_id_index
            .get_or_init(|| {
                self.readonly_index()
                    .change_id_index(&mut self.view().heads().iter())
            })
            .as_ref()
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        self.loader.op_heads_store()
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        self.loader.index_store()
    }

    pub fn settings(&self) -> &UserSettings {
        self.loader.settings()
    }

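    /// Starts a transaction based on this repo's view and index. A sketch of
    /// the typical flow (the description string is arbitrary, and
    /// `Transaction::commit` is assumed from the transaction API):
    ///
    /// ```ignore
    /// let mut tx = repo.start_transaction();
    /// // ... mutate the repo through tx.repo_mut() ...
    /// let new_repo = tx.commit("my operation")?;
    /// ```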
    pub fn start_transaction(self: &Arc<ReadonlyRepo>) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index(), &self.view);
        Transaction::new(mut_repo, self.settings())
    }

    pub fn reload_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at_head()
    }

    #[instrument]
    pub fn reload_at(&self, operation: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        self.loader().load_at(operation)
    }
}

impl Repo for ReadonlyRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        self
    }

    fn store(&self) -> &Arc<Store> {
        self.loader.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.loader.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.readonly_index().as_index()
    }

    fn view(&self) -> &View {
        &self.view
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.loader.submodule_store()
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        self.change_id_index().resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        self.change_id_index().shortest_unique_prefix_len(target_id)
    }
}

pub type BackendInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendInitError> + 'a;
#[rustfmt::skip]
pub type OpStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendInitError>
    + 'a;
pub type OpHeadsStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendInitError> + 'a;
pub type IndexStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendInitError> + 'a;
pub type SubmoduleStoreInitializer<'a> =
    dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendInitError> + 'a;

type BackendFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn Backend>, BackendLoadError>>;
type OpStoreFactory = Box<
    dyn Fn(&UserSettings, &Path, RootOperationData) -> Result<Box<dyn OpStore>, BackendLoadError>,
>;
type OpHeadsStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn OpHeadsStore>, BackendLoadError>>;
type IndexStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn IndexStore>, BackendLoadError>>;
type SubmoduleStoreFactory =
    Box<dyn Fn(&UserSettings, &Path) -> Result<Box<dyn SubmoduleStore>, BackendLoadError>>;

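/// Merges the factories from `ext` into `base`. Panics if both maps register
/// a factory under the same name.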
pub fn merge_factories_map<F>(base: &mut HashMap<String, F>, ext: HashMap<String, F>) {
    for (name, factory) in ext {
        match base.entry(name) {
            Entry::Vacant(v) => {
                v.insert(factory);
            }
            Entry::Occupied(o) => {
                panic!("Conflicting factory definitions for '{}' factory", o.key())
            }
        }
    }
}

pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
    index_store_factories: HashMap<String, IndexStoreFactory>,
    submodule_store_factories: HashMap<String, SubmoduleStoreFactory>,
}

impl Default for StoreFactories {
    fn default() -> Self {
        let mut factories = StoreFactories::empty();

        factories.add_backend(
            SimpleBackend::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleBackend::load(store_path)))),
        );
        #[cfg(feature = "git")]
        factories.add_backend(
            crate::git_backend::GitBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::git_backend::GitBackend::load(
                    settings, store_path,
                )?))
            }),
        );
        #[cfg(feature = "testing")]
        factories.add_backend(
            crate::secret_backend::SecretBackend::name(),
            Box::new(|settings, store_path| {
                Ok(Box::new(crate::secret_backend::SecretBackend::load(
                    settings, store_path,
                )?))
            }),
        );

        factories.add_op_store(
            SimpleOpStore::name(),
            Box::new(|_settings, store_path, root_data| {
                Ok(Box::new(SimpleOpStore::load(store_path, root_data)))
            }),
        );

        factories.add_op_heads_store(
            SimpleOpHeadsStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(SimpleOpHeadsStore::load(store_path)))),
        );

        factories.add_index_store(
            DefaultIndexStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultIndexStore::load(store_path)))),
        );

        factories.add_submodule_store(
            DefaultSubmoduleStore::name(),
            Box::new(|_settings, store_path| Ok(Box::new(DefaultSubmoduleStore::load(store_path)))),
        );

        factories
    }
}

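// A caller can extend the defaults with additional store implementations
// before loading a repo. A minimal sketch (the "my-backend" name and the
// `MyBackend` type are hypothetical):
//
//     let mut factories = StoreFactories::default();
//     factories.add_backend(
//         "my-backend",
//         Box::new(|_settings, store_path| Ok(Box::new(MyBackend::load(store_path)?))),
//     );
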
#[derive(Debug, Error)]
pub enum StoreLoadError {
    #[error("Unsupported {store} backend type '{store_type}'")]
    UnsupportedType {
        store: &'static str,
        store_type: String,
    },
    #[error("Failed to read {store} backend type")]
    ReadError {
        store: &'static str,
        source: PathError,
    },
    #[error(transparent)]
    Backend(#[from] BackendLoadError),
    #[error(transparent)]
    Signing(#[from] SignInitError),
}

impl StoreFactories {
    pub fn empty() -> Self {
        StoreFactories {
            backend_factories: HashMap::new(),
            op_store_factories: HashMap::new(),
            op_heads_store_factories: HashMap::new(),
            index_store_factories: HashMap::new(),
            submodule_store_factories: HashMap::new(),
        }
    }

    pub fn merge(&mut self, ext: StoreFactories) {
        let StoreFactories {
            backend_factories,
            op_store_factories,
            op_heads_store_factories,
            index_store_factories,
            submodule_store_factories,
        } = ext;

        merge_factories_map(&mut self.backend_factories, backend_factories);
        merge_factories_map(&mut self.op_store_factories, op_store_factories);
        merge_factories_map(&mut self.op_heads_store_factories, op_heads_store_factories);
        merge_factories_map(&mut self.index_store_factories, index_store_factories);
        merge_factories_map(
            &mut self.submodule_store_factories,
            submodule_store_factories,
        );
    }

    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
        self.backend_factories.insert(name.to_string(), factory);
    }

    pub fn load_backend(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn Backend>, StoreLoadError> {
        let backend_type = read_store_type("commit", store_path.join("type"))?;
        let backend_factory = self.backend_factories.get(&backend_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "commit",
                store_type: backend_type.to_string(),
            }
        })?;
        Ok(backend_factory(settings, store_path)?)
    }

    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
        self.op_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_op_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
        root_data: RootOperationData,
    ) -> Result<Box<dyn OpStore>, StoreLoadError> {
        let op_store_type = read_store_type("operation", store_path.join("type"))?;
        let op_store_factory = self.op_store_factories.get(&op_store_type).ok_or_else(|| {
            StoreLoadError::UnsupportedType {
                store: "operation",
                store_type: op_store_type.to_string(),
            }
        })?;
        Ok(op_store_factory(settings, store_path, root_data)?)
    }

    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
        self.op_heads_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_op_heads_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn OpHeadsStore>, StoreLoadError> {
        let op_heads_store_type = read_store_type("operation heads", store_path.join("type"))?;
        let op_heads_store_factory = self
            .op_heads_store_factories
            .get(&op_heads_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "operation heads",
                store_type: op_heads_store_type.to_string(),
            })?;
        Ok(op_heads_store_factory(settings, store_path)?)
    }

    pub fn add_index_store(&mut self, name: &str, factory: IndexStoreFactory) {
        self.index_store_factories.insert(name.to_string(), factory);
    }

    pub fn load_index_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn IndexStore>, StoreLoadError> {
        let index_store_type = read_store_type("index", store_path.join("type"))?;
        let index_store_factory = self
            .index_store_factories
            .get(&index_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "index",
                store_type: index_store_type.to_string(),
            })?;
        Ok(index_store_factory(settings, store_path)?)
    }

    pub fn add_submodule_store(&mut self, name: &str, factory: SubmoduleStoreFactory) {
        self.submodule_store_factories
            .insert(name.to_string(), factory);
    }

    pub fn load_submodule_store(
        &self,
        settings: &UserSettings,
        store_path: &Path,
    ) -> Result<Box<dyn SubmoduleStore>, StoreLoadError> {
        let submodule_store_type = read_store_type("submodule_store", store_path.join("type"))?;
        let submodule_store_factory = self
            .submodule_store_factories
            .get(&submodule_store_type)
            .ok_or_else(|| StoreLoadError::UnsupportedType {
                store: "submodule_store",
                store_type: submodule_store_type.to_string(),
            })?;

        Ok(submodule_store_factory(settings, store_path)?)
    }
}

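/// Reads the `type` file written by `ReadonlyRepo::init()` to determine which
/// store implementation backs the given store directory.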
pub fn read_store_type(
    store: &'static str,
    path: impl AsRef<Path>,
) -> Result<String, StoreLoadError> {
    let path = path.as_ref();
    fs::read_to_string(path)
        .context(path)
        .map_err(|source| StoreLoadError::ReadError { store, source })
}

#[derive(Debug, Error)]
pub enum RepoLoaderError {
    #[error(transparent)]
    Backend(#[from] BackendError),
    #[error(transparent)]
    IndexRead(#[from] IndexReadError),
    #[error(transparent)]
    OpHeadResolution(#[from] OpHeadResolutionError),
    #[error(transparent)]
    OpHeadsStoreError(#[from] OpHeadsStoreError),
    #[error(transparent)]
    OpStore(#[from] OpStoreError),
    #[error(transparent)]
    TransactionCommit(#[from] TransactionCommitError),
}

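/// Helper that owns the lower-level stores and knows how to load a
/// [`ReadonlyRepo`] at the head operation or at an arbitrary operation.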
#[derive(Clone)]
pub struct RepoLoader {
    settings: UserSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<dyn IndexStore>,
    submodule_store: Arc<dyn SubmoduleStore>,
}

impl RepoLoader {
    pub fn new(
        settings: UserSettings,
        store: Arc<Store>,
        op_store: Arc<dyn OpStore>,
        op_heads_store: Arc<dyn OpHeadsStore>,
        index_store: Arc<dyn IndexStore>,
        submodule_store: Arc<dyn SubmoduleStore>,
    ) -> Self {
        Self {
            settings,
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        }
    }

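    /// Creates a `RepoLoader` for the repo at `repo_path` by reading the
    /// `type` file in each store directory and instantiating the matching
    /// implementation from `store_factories`. A sketch of typical usage
    /// (the settings and path are assumed to exist):
    ///
    /// ```ignore
    /// let loader = RepoLoader::init_from_file_system(
    ///     &settings,
    ///     repo_path,
    ///     &StoreFactories::default(),
    /// )?;
    /// let repo = loader.load_at_head()?;
    /// ```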
    pub fn init_from_file_system(
        settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Self, StoreLoadError> {
        let store = Store::new(
            store_factories.load_backend(settings, &repo_path.join("store"))?,
            Signer::from_settings(settings)?,
        );
        let root_op_data = RootOperationData {
            root_commit_id: store.root_commit_id().clone(),
        };
        let op_store = Arc::from(store_factories.load_op_store(
            settings,
            &repo_path.join("op_store"),
            root_op_data,
        )?);
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(settings, &repo_path.join("op_heads"))?);
        let index_store =
            Arc::from(store_factories.load_index_store(settings, &repo_path.join("index"))?);
        let submodule_store = Arc::from(
            store_factories.load_submodule_store(settings, &repo_path.join("submodule_store"))?,
        );
        Ok(Self {
            settings: settings.clone(),
            store,
            op_store,
            op_heads_store,
            index_store,
            submodule_store,
        })
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<dyn IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        &self.submodule_store
    }

    pub fn load_at_head(&self) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self._resolve_op_heads(op_heads),
        )?;
        let view = op.view()?;
        self._finish_load(op, view)
    }

    #[instrument(skip(self))]
    pub fn load_at(&self, op: &Operation) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let view = op.view()?;
        self._finish_load(op.clone(), view)
    }

    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Box<dyn ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

    pub fn root_operation(&self) -> Operation {
        self.load_operation(self.op_store.root_operation_id())
            .expect("failed to read root operation")
    }

    pub fn load_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
        let data = self.op_store.read_operation(id)?;
        Ok(Operation::new(self.op_store.clone(), id.clone(), data))
    }

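    /// Merges the given operations into a single operation by starting a
    /// transaction at the first one, merging the others in, and rebasing
    /// descendants as needed. Returns the root operation if `operations` is
    /// empty, and the single operation unchanged if only one is given.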
    pub fn merge_operations(
        &self,
        operations: Vec<Operation>,
        tx_description: Option<&str>,
    ) -> Result<Operation, RepoLoaderError> {
        let num_operations = operations.len();
        let mut operations = operations.into_iter();
        let Some(base_op) = operations.next() else {
            return Ok(self.root_operation());
        };
        let final_op = if num_operations > 1 {
            let base_repo = self.load_at(&base_op)?;
            let mut tx = base_repo.start_transaction();
            for other_op in operations {
                tx.merge_operation(other_op)?;
                tx.repo_mut().rebase_descendants()?;
            }
            let tx_description = tx_description.map_or_else(
                || format!("merge {num_operations} operations"),
                |tx_description| tx_description.to_string(),
            );
            let merged_repo = tx.write(tx_description)?.leave_unpublished();
            merged_repo.operation().clone()
        } else {
            base_op
        };

        Ok(final_op)
    }

    fn _resolve_op_heads(&self, op_heads: Vec<Operation>) -> Result<Operation, RepoLoaderError> {
        assert!(!op_heads.is_empty());
        self.merge_operations(op_heads, Some("reconcile divergent operations"))
    }

    fn _finish_load(
        &self,
        operation: Operation,
        view: View,
    ) -> Result<Arc<ReadonlyRepo>, RepoLoaderError> {
        let index = self.index_store.get_index_at_op(&operation, &self.store)?;
        let repo = ReadonlyRepo {
            loader: self.clone(),
            operation,
            index,
            change_id_index: OnceCell::new(),
            view,
        };
        Ok(Arc::new(repo))
    }
}

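/// Records that a commit was rewritten: either to a single replacement
/// (`Rewritten`), to several divergent replacements (`Divergent`), or
/// abandoned so that its descendants move onto the listed new parents
/// (`Abandoned`).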
#[derive(Clone, Debug, PartialEq, Eq)]
enum Rewrite {
    Rewritten(CommitId),
    Divergent(Vec<CommitId>),
    Abandoned(Vec<CommitId>),
}

impl Rewrite {
    fn new_parent_ids(&self) -> &[CommitId] {
        match self {
            Rewrite::Rewritten(new_parent_id) => std::slice::from_ref(new_parent_id),
            Rewrite::Divergent(new_parent_ids) => new_parent_ids.as_slice(),
            Rewrite::Abandoned(new_parent_ids) => new_parent_ids.as_slice(),
        }
    }
}

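/// A mutable copy of a repository's view and index, created within a
/// [`Transaction`]. `parent_mapping` tracks commits rewritten in this
/// transaction so their descendants can be rebased before the transaction
/// commits; `commit_predecessors` records the predecessors of each newly
/// written commit.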
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: Box<dyn MutableIndex>,
    view: DirtyCell<View>,
    commit_predecessors: BTreeMap<CommitId, Vec<CommitId>>,
    parent_mapping: HashMap<CommitId, Rewrite>,
}

impl MutableRepo {
    pub fn new(
        base_repo: Arc<ReadonlyRepo>,
        index: &dyn ReadonlyIndex,
        view: &View,
    ) -> MutableRepo {
        let mut_view = view.clone();
        let mut_index = index.start_modification();
        MutableRepo {
            base_repo,
            index: mut_index,
            view: DirtyCell::with_clean(mut_view),
            commit_predecessors: Default::default(),
            parent_mapping: Default::default(),
        }
    }

    pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    pub fn mutable_index(&self) -> &dyn MutableIndex {
        self.index.as_ref()
    }

    pub(crate) fn is_backed_by_default_index(&self) -> bool {
        self.index.as_any().is::<DefaultMutableIndex>()
    }

    pub fn has_changes(&self) -> bool {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        !(self.commit_predecessors.is_empty()
            && self.parent_mapping.is_empty()
            && self.view() == &self.base_repo.view)
    }

    pub(crate) fn consume(
        self,
    ) -> (
        Box<dyn MutableIndex>,
        View,
        BTreeMap<CommitId, Vec<CommitId>>,
    ) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner(), self.commit_predecessors)
    }

    pub fn new_commit(
        &mut self,
        parents: Vec<CommitId>,
        tree_id: MergedTreeId,
    ) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_new_commit(self, settings, parents, tree_id).attach(self)
    }

    pub fn rewrite_commit(&mut self, predecessor: &Commit) -> CommitBuilder<'_> {
        let settings = self.base_repo.settings();
        DetachedCommitBuilder::for_rewrite_from(self, settings, predecessor).attach(self)
    }

    pub(crate) fn set_predecessors(&mut self, id: CommitId, predecessors: Vec<CommitId>) {
        self.commit_predecessors.insert(id, predecessors);
    }

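    /// Records a commit as having been rewritten to another commit in this
    /// transaction. Its descendants will be rebased onto the new commit when
    /// `rebase_descendants()` runs.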
    pub fn set_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping
            .insert(old_id, Rewrite::Rewritten(new_id));
    }

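    /// Records a commit as having been rewritten into multiple visible
    /// commits. Unlike `set_rewritten_commit()`, descendants are not rebased
    /// onto the replacements; local bookmarks pointing at the old commit
    /// become conflicted between the new ids.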
    pub fn set_divergent_rewrite(
        &mut self,
        old_id: CommitId,
        new_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id.clone(),
            Rewrite::Divergent(new_ids.into_iter().collect()),
        );
    }

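    /// Records a commit as having been abandoned in this transaction. Its
    /// descendants will be rebased onto its parents, and local bookmarks
    /// pointing to it will be updated (or deleted, depending on
    /// `RewriteRefsOptions`).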
    pub fn record_abandoned_commit(&mut self, old_commit: &Commit) {
        assert_ne!(old_commit.id(), self.store().root_commit_id());
        self.record_abandoned_commit_with_parents(
            old_commit.id().clone(),
            old_commit.parent_ids().iter().cloned(),
        );
    }

    pub fn record_abandoned_commit_with_parents(
        &mut self,
        old_id: CommitId,
        new_parent_ids: impl IntoIterator<Item = CommitId>,
    ) {
        assert_ne!(old_id, *self.store().root_commit_id());
        self.parent_mapping.insert(
            old_id,
            Rewrite::Abandoned(new_parent_ids.into_iter().collect()),
        );
    }

    pub fn has_rewrites(&self) -> bool {
        !self.parent_mapping.is_empty()
    }

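    /// Maps each of the given commit ids to its replacement(s), following
    /// chains of rewrites, and returns the de-duplicated result. Ids that
    /// were not rewritten, or whose rewrite is divergent, map to themselves.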
    pub fn new_parents(&self, old_ids: &[CommitId]) -> Vec<CommitId> {
        self.rewritten_ids_with(old_ids, |rewrite| !matches!(rewrite, Rewrite::Divergent(_)))
    }

    fn rewritten_ids_with(
        &self,
        old_ids: &[CommitId],
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> Vec<CommitId> {
        assert!(!old_ids.is_empty());
        let mut new_ids = Vec::with_capacity(old_ids.len());
        let mut to_visit = old_ids.iter().rev().collect_vec();
        let mut visited = HashSet::new();
        while let Some(id) = to_visit.pop() {
            if !visited.insert(id) {
                continue;
            }
            match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => {
                    new_ids.push(id.clone());
                }
                Some(rewrite) => {
                    let replacements = rewrite.new_parent_ids();
                    assert!(
                        !replacements.is_empty(),
                        "Found empty value for key {id:?} in the parent mapping",
                    );
                    to_visit.extend(replacements.iter().rev());
                }
            }
        }
        assert!(
            !new_ids.is_empty(),
            "new ids become empty because of cycle in the parent mapping"
        );
        debug_assert!(new_ids.iter().all_unique());
        new_ids
    }

    fn resolve_rewrite_mapping_with(
        &self,
        mut predicate: impl FnMut(&Rewrite) -> bool,
    ) -> HashMap<CommitId, Vec<CommitId>> {
        let sorted_ids = dag_walk::topo_order_forward(
            self.parent_mapping.keys(),
            |&id| id,
            |&id| match self.parent_mapping.get(id).filter(|&v| predicate(v)) {
                None => &[],
                Some(rewrite) => rewrite.new_parent_ids(),
            },
        );
        let mut new_mapping: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for old_id in sorted_ids {
            let Some(rewrite) = self.parent_mapping.get(old_id).filter(|&v| predicate(v)) else {
                continue;
            };
            let lookup = |id| new_mapping.get(id).map_or(slice::from_ref(id), |ids| ids);
            let new_ids = match rewrite.new_parent_ids() {
                [id] => lookup(id).to_vec(),
                ids => ids.iter().flat_map(lookup).unique().cloned().collect(),
            };
            debug_assert_eq!(
                new_ids,
                self.rewritten_ids_with(slice::from_ref(old_id), &mut predicate)
            );
            new_mapping.insert(old_id.clone(), new_ids);
        }
        new_mapping
    }

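    /// Updates local bookmarks, working-copy commits, and visible heads to
    /// reflect the rewrites recorded in `parent_mapping`.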
    pub fn update_rewritten_references(
        &mut self,
        options: &RewriteRefsOptions,
    ) -> BackendResult<()> {
        self.update_all_references(options)?;
        self.update_heads()
            .map_err(|err| err.into_backend_error())?;
        Ok(())
    }

    fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> {
        let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true);
        self.update_local_bookmarks(&rewrite_mapping, options);
        self.update_wc_commits(&rewrite_mapping)?;
        Ok(())
    }

    fn update_local_bookmarks(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
    ) {
        let changed_branches = self
            .view()
            .local_bookmarks()
            .flat_map(|(name, target)| {
                target.added_ids().filter_map(|id| {
                    let change = rewrite_mapping.get_key_value(id)?;
                    Some((name.to_owned(), change))
                })
            })
            .collect_vec();
        for (bookmark_name, (old_commit_id, new_commit_ids)) in changed_branches {
            let should_delete = options.delete_abandoned_bookmarks
                && matches!(
                    self.parent_mapping.get(old_commit_id),
                    Some(Rewrite::Abandoned(_))
                );
            let old_target = RefTarget::normal(old_commit_id.clone());
            let new_target = if should_delete {
                RefTarget::absent()
            } else {
                let ids = itertools::intersperse(new_commit_ids, old_commit_id)
                    .map(|id| Some(id.clone()));
                RefTarget::from_merge(MergeBuilder::from_iter(ids).build())
            };

            self.merge_local_bookmark(&bookmark_name, &old_target, &new_target);
        }
    }

    fn update_wc_commits(
        &mut self,
        rewrite_mapping: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<()> {
        let changed_wc_commits = self
            .view()
            .wc_commit_ids()
            .iter()
            .filter_map(|(name, commit_id)| {
                let change = rewrite_mapping.get_key_value(commit_id)?;
                Some((name.to_owned(), change))
            })
            .collect_vec();
        let mut recreated_wc_commits: HashMap<&CommitId, Commit> = HashMap::new();
        for (name, (old_commit_id, new_commit_ids)) in changed_wc_commits {
            let abandoned_old_commit = matches!(
                self.parent_mapping.get(old_commit_id),
                Some(Rewrite::Abandoned(_))
            );
            let new_wc_commit = if !abandoned_old_commit {
                self.store().get_commit(&new_commit_ids[0])?
            } else if let Some(commit) = recreated_wc_commits.get(old_commit_id) {
                commit.clone()
            } else {
                let new_commits: Vec<_> = new_commit_ids
                    .iter()
                    .map(|id| self.store().get_commit(id))
                    .try_collect()?;
                let merged_parents_tree = merge_commit_trees(self, &new_commits)?;
                let commit = self
                    .new_commit(new_commit_ids.clone(), merged_parents_tree.id().clone())
                    .write()?;
                recreated_wc_commits.insert(old_commit_id, commit.clone());
                commit
            };
            self.edit(name, &new_wc_commit).map_err(|err| match err {
                EditCommitError::BackendError(backend_error) => backend_error,
                EditCommitError::WorkingCopyCommitNotFound(_)
                | EditCommitError::RewriteRootCommit(_) => panic!("unexpected error: {err:?}"),
            })?;
        }
        Ok(())
    }

    fn update_heads(&mut self) -> Result<(), RevsetEvaluationError> {
        let old_commits_expression =
            RevsetExpression::commits(self.parent_mapping.keys().cloned().collect());
        let heads_to_add_expression = old_commits_expression
            .parents()
            .minus(&old_commits_expression);
        let heads_to_add: Vec<_> = heads_to_add_expression
            .evaluate(self)?
            .iter()
            .try_collect()?;

        let mut view = self.view().store_view().clone();
        for commit_id in self.parent_mapping.keys() {
            view.head_ids.remove(commit_id);
        }
        view.head_ids.extend(heads_to_add);
        self.set_view(view);
        Ok(())
    }

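    /// Finds commits descending from `roots` that may need to be rebased,
    /// excluding those already recorded as rewritten in `parent_mapping`.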
    pub fn find_descendants_for_rebase(&self, roots: Vec<CommitId>) -> BackendResult<Vec<Commit>> {
        let to_visit_revset = RevsetExpression::commits(roots)
            .descendants()
            .minus(&RevsetExpression::commits(
                self.parent_mapping.keys().cloned().collect(),
            ))
            .evaluate(self)
            .map_err(|err| err.into_backend_error())?;
        let to_visit = to_visit_revset
            .iter()
            .commits(self.store())
            .try_collect()
            .map_err(|err| err.into_backend_error())?;
        Ok(to_visit)
    }

    fn order_commits_for_rebase(
        &self,
        to_visit: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
    ) -> BackendResult<Vec<Commit>> {
        let to_visit_set: HashSet<CommitId> =
            to_visit.iter().map(|commit| commit.id().clone()).collect();
        let mut visited = HashSet::new();
        let store = self.store();
        dag_walk::topo_order_reverse_ok(
            to_visit.into_iter().map(Ok),
            |commit| commit.id().clone(),
            |commit| -> Vec<BackendResult<Commit>> {
                visited.insert(commit.id().clone());
                let mut dependents = vec![];
                let parent_ids = new_parents_map
                    .get(commit.id())
                    .map_or(commit.parent_ids(), |parent_ids| parent_ids);
                for parent_id in parent_ids {
                    let parent = store.get_commit(parent_id);
                    let Ok(parent) = parent else {
                        dependents.push(parent);
                        continue;
                    };
                    if let Some(rewrite) = self.parent_mapping.get(parent.id()) {
                        for target in rewrite.new_parent_ids() {
                            if to_visit_set.contains(target) && !visited.contains(target) {
                                dependents.push(store.get_commit(target));
                            }
                        }
                    }
                    if to_visit_set.contains(parent.id()) {
                        dependents.push(Ok(parent));
                    }
                }
                dependents
            },
            |_| panic!("graph has cycle"),
        )
    }

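    /// Rewrites descendants of `roots` in topological order, invoking
    /// `callback` with a [`CommitRewriter`] for each commit so the caller
    /// decides how (or whether) to rewrite it. A sketch of a callback that
    /// reparents every affected commit, mirroring `reparent_descendants()`
    /// below (`mut_repo` and `roots` are assumed to exist):
    ///
    /// ```ignore
    /// mut_repo.transform_descendants(roots, |rewriter| {
    ///     if rewriter.parents_changed() {
    ///         rewriter.reparent().write()?;
    ///     }
    ///     Ok(())
    /// })?;
    /// ```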
    pub fn transform_descendants(
        &mut self,
        roots: Vec<CommitId>,
        callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let options = RewriteRefsOptions::default();
        self.transform_descendants_with_options(roots, &HashMap::new(), &options, callback)
    }

    pub fn transform_descendants_with_options(
        &mut self,
        roots: Vec<CommitId>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let descendants = self.find_descendants_for_rebase(roots)?;
        self.transform_commits(descendants, new_parents_map, options, callback)
    }

    pub fn transform_commits(
        &mut self,
        commits: Vec<Commit>,
        new_parents_map: &HashMap<CommitId, Vec<CommitId>>,
        options: &RewriteRefsOptions,
        mut callback: impl FnMut(CommitRewriter) -> BackendResult<()>,
    ) -> BackendResult<()> {
        let mut to_visit = self.order_commits_for_rebase(commits, new_parents_map)?;
        while let Some(old_commit) = to_visit.pop() {
            let parent_ids = new_parents_map
                .get(old_commit.id())
                .map_or(old_commit.parent_ids(), |parent_ids| parent_ids);
            let new_parent_ids = self.new_parents(parent_ids);
            let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids);
            callback(rewriter)?;
        }
        self.update_rewritten_references(options)?;
        Ok(())
    }

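    /// Rebases all descendants of the commits recorded as rewritten or
    /// abandoned, calling `progress` for each rebased commit, and then
    /// clears `parent_mapping`.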
    pub fn rebase_descendants_with_options(
        &mut self,
        options: &RebaseOptions,
        mut progress: impl FnMut(Commit, RebasedCommit),
    ) -> BackendResult<()> {
        let roots = self.parent_mapping.keys().cloned().collect();
        self.transform_descendants_with_options(
            roots,
            &HashMap::new(),
            &options.rewrite_refs,
            |rewriter| {
                if rewriter.parents_changed() {
                    let old_commit = rewriter.old_commit().clone();
                    let rebased_commit = rebase_commit_with_options(rewriter, options)?;
                    progress(old_commit, rebased_commit);
                }
                Ok(())
            },
        )?;
        self.parent_mapping.clear();
        Ok(())
    }

    pub fn rebase_descendants(&mut self) -> BackendResult<usize> {
        let options = RebaseOptions::default();
        let mut num_rebased = 0;
        self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| {
            num_rebased += 1;
        })?;
        Ok(num_rebased)
    }

    pub fn reparent_descendants(&mut self) -> BackendResult<usize> {
        let roots = self.parent_mapping.keys().cloned().collect_vec();
        let mut num_reparented = 0;
        self.transform_descendants(roots, |rewriter| {
            if rewriter.parents_changed() {
                let builder = rewriter.reparent();
                builder.write()?;
                num_reparented += 1;
            }
            Ok(())
        })?;
        self.parent_mapping.clear();
        Ok(num_reparented)
    }

    pub fn set_wc_commit(
        &mut self,
        name: WorkspaceNameBuf,
        commit_id: CommitId,
    ) -> Result<(), RewriteRootCommit> {
        if &commit_id == self.store().root_commit_id() {
            return Err(RewriteRootCommit);
        }
        self.view_mut().set_wc_commit(name, commit_id);
        Ok(())
    }

    pub fn remove_wc_commit(&mut self, name: &WorkspaceName) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(name)?;
        self.view_mut().remove_wc_commit(name);
        Ok(())
    }

    fn merge_wc_commit(
        &mut self,
        name: &WorkspaceName,
        base_id: Option<&CommitId>,
        other_id: Option<&CommitId>,
    ) {
        let view = self.view.get_mut();
        let self_id = view.get_wc_commit_id(name);
        let new_id = if let Some(resolved) = trivial_merge(&[self_id, base_id, other_id]) {
            resolved.cloned()
        } else if self_id.is_none() || other_id.is_none() {
            None
        } else {
            self_id.cloned()
        };
        match new_id {
            Some(id) => view.set_wc_commit(name.to_owned(), id),
            None => view.remove_wc_commit(name),
        }
    }

    pub fn rename_workspace(
        &mut self,
        old_name: &WorkspaceName,
        new_name: WorkspaceNameBuf,
    ) -> Result<(), RenameWorkspaceError> {
        self.view_mut().rename_workspace(old_name, new_name)
    }

    pub fn check_out(
        &mut self,
        name: WorkspaceNameBuf,
        commit: &Commit,
    ) -> Result<Commit, CheckOutCommitError> {
        let wc_commit = self
            .new_commit(vec![commit.id().clone()], commit.tree_id().clone())
            .write()?;
        self.edit(name, &wc_commit)?;
        Ok(wc_commit)
    }

    pub fn edit(&mut self, name: WorkspaceNameBuf, commit: &Commit) -> Result<(), EditCommitError> {
        self.maybe_abandon_wc_commit(&name)?;
        self.add_head(commit)?;
        Ok(self.set_wc_commit(name, commit.id().clone())?)
    }

    fn maybe_abandon_wc_commit(
        &mut self,
        workspace_name: &WorkspaceName,
    ) -> Result<(), EditCommitError> {
        let is_commit_referenced = |view: &View, commit_id: &CommitId| -> bool {
            view.wc_commit_ids()
                .iter()
                .filter(|&(name, _)| name != workspace_name)
                .map(|(_, wc_id)| wc_id)
                .chain(
                    view.local_bookmarks()
                        .flat_map(|(_, target)| target.added_ids()),
                )
                .any(|id| id == commit_id)
        };

        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(workspace_name).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.is_discardable(self)?
                && self
                    .view
                    .with_ref(|v| !is_commit_referenced(v, wc_commit.id()))
                && self.view().heads().contains(wc_commit.id())
            {
                self.record_abandoned_commit(&wc_commit);
            }
        }

        Ok(())
    }

    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        let root_commit_id = self.store().root_commit_id();
        if view.head_ids.is_empty() {
            view.head_ids.insert(root_commit_id.clone());
        } else if view.head_ids.len() > 1 {
            view.head_ids.remove(root_commit_id);
            view.head_ids = self
                .index()
                .heads(&mut view.head_ids.iter())
                .unwrap()
                .into_iter()
                .collect();
        }
        assert!(!view.head_ids.is_empty());
    }

    pub fn add_head(&mut self, head: &Commit) -> BackendResult<()> {
        self.add_heads(slice::from_ref(head))
    }

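    /// Makes the given commits visible heads, adding any missing ancestors
    /// to the index first. A single head whose parents are already heads is
    /// handled on a fast path that avoids walking the ancestry.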
    pub fn add_heads(&mut self, heads: &[Commit]) -> BackendResult<()> {
        let current_heads = self.view.get_mut().heads();
        match heads {
            [] => {}
            [head]
                if head
                    .parent_ids()
                    .iter()
                    .all(|parent_id| current_heads.contains(parent_id)) =>
            {
                self.index.add_commit(head);
                self.view.get_mut().add_head(head.id());
                for parent_id in head.parent_ids() {
                    self.view.get_mut().remove_head(parent_id);
                }
            }
            _ => {
                let missing_commits = dag_walk::topo_order_reverse_ord_ok(
                    heads
                        .iter()
                        .cloned()
                        .map(CommitByCommitterTimestamp)
                        .map(Ok),
                    |CommitByCommitterTimestamp(commit)| commit.id().clone(),
                    |CommitByCommitterTimestamp(commit)| {
                        commit
                            .parent_ids()
                            .iter()
                            .filter(|id| !self.index().has_id(id))
                            .map(|id| self.store().get_commit(id))
                            .map_ok(CommitByCommitterTimestamp)
                            .collect_vec()
                    },
                    |_| panic!("graph has cycle"),
                )?;
                for CommitByCommitterTimestamp(missing_commit) in missing_commits.iter().rev() {
                    self.index.add_commit(missing_commit);
                }
                for head in heads {
                    self.view.get_mut().add_head(head.id());
                }
                self.view.mark_dirty();
            }
        }
        Ok(())
    }

    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    pub fn get_local_bookmark(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_local_bookmark(name).clone())
    }

    pub fn set_local_bookmark_target(&mut self, name: &RefName, target: RefTarget) {
        let view = self.view_mut();
        for id in target.added_ids() {
            view.add_head(id);
        }
        view.set_local_bookmark_target(name, target);
        self.view.mark_dirty();
    }

    pub fn merge_local_bookmark(
        &mut self,
        name: &RefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_local_bookmark(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        self.set_local_bookmark_target(name, new_target);
    }

    pub fn get_remote_bookmark(&self, symbol: RemoteRefSymbol<'_>) -> RemoteRef {
        self.view
            .with_ref(|v| v.get_remote_bookmark(symbol).clone())
    }

    pub fn set_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>, remote_ref: RemoteRef) {
        self.view_mut().set_remote_bookmark(symbol, remote_ref);
    }

    fn merge_remote_bookmark(
        &mut self,
        symbol: RemoteRefSymbol<'_>,
        base_ref: &RemoteRef,
        other_ref: &RemoteRef,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_ref = view.get_remote_bookmark(symbol);
        let new_ref = merge_remote_refs(index, self_ref, base_ref, other_ref);
        view.set_remote_bookmark(symbol, new_ref);
    }

    pub fn track_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        let base_target = remote_ref.tracked_target();
        self.merge_local_bookmark(symbol.name, base_target, &remote_ref.target);
        remote_ref.state = RemoteRefState::Tracked;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn untrack_remote_bookmark(&mut self, symbol: RemoteRefSymbol<'_>) {
        let mut remote_ref = self.get_remote_bookmark(symbol);
        remote_ref.state = RemoteRefState::New;
        self.set_remote_bookmark(symbol, remote_ref);
    }

    pub fn remove_remote(&mut self, remote_name: &RemoteName) {
        self.view_mut().remove_remote(remote_name);
    }

    pub fn rename_remote(&mut self, old: &RemoteName, new: &RemoteName) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_tag(&self, name: &RefName) -> RefTarget {
        self.view.with_ref(|v| v.get_tag(name).clone())
    }

    pub fn set_tag_target(&mut self, name: &RefName, target: RefTarget) {
        self.view_mut().set_tag_target(name, target);
    }

    pub fn merge_tag(&mut self, name: &RefName, base_target: &RefTarget, other_target: &RefTarget) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_tag(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_tag_target(name, new_target);
    }

    pub fn get_git_ref(&self, name: &GitRefName) -> RefTarget {
        self.view.with_ref(|v| v.get_git_ref(name).clone())
    }

    pub fn set_git_ref_target(&mut self, name: &GitRefName, target: RefTarget) {
        self.view_mut().set_git_ref_target(name, target);
    }

    fn merge_git_ref(
        &mut self,
        name: &GitRefName,
        base_target: &RefTarget,
        other_target: &RefTarget,
    ) {
        let view = self.view.get_mut();
        let index = self.index.as_index();
        let self_target = view.get_git_ref(name);
        let new_target = merge_ref_targets(index, self_target, base_target, other_target);
        view.set_git_ref_target(name, new_target);
    }

    pub fn git_head(&self) -> RefTarget {
        self.view.with_ref(|v| v.git_head().clone())
    }

    pub fn set_git_head_target(&mut self, target: RefTarget) {
        self.view_mut().set_git_head_target(target);
    }

    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }

    pub fn merge(
        &mut self,
        base_repo: &ReadonlyRepo,
        other_repo: &ReadonlyRepo,
    ) -> BackendResult<()> {
        self.index.merge_in(base_repo.readonly_index());
        self.index.merge_in(other_repo.readonly_index());

        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view)?;
        self.view.mark_dirty();
        Ok(())
    }

    pub fn merge_index(&mut self, other_repo: &ReadonlyRepo) {
        self.index.merge_in(other_repo.readonly_index());
    }

    fn merge_view(&mut self, base: &View, other: &View) -> BackendResult<()> {
        let changed_wc_commits = diff_named_commit_ids(base.wc_commit_ids(), other.wc_commit_ids());
        for (name, (base_id, other_id)) in changed_wc_commits {
            self.merge_wc_commit(name, base_id, other_id);
        }

        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();

        if self.is_backed_by_default_index() {
            self.record_rewrites(&base_heads, &own_heads)?;
            self.record_rewrites(&base_heads, &other_heads)?;
        } else {
            for removed_head in base.heads().difference(other.heads()) {
                self.view_mut().remove_head(removed_head);
            }
        }
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        let changed_local_bookmarks =
            diff_named_ref_targets(base.local_bookmarks(), other.local_bookmarks());
        for (name, (base_target, other_target)) in changed_local_bookmarks {
            self.merge_local_bookmark(name, base_target, other_target);
        }

        let changed_tags = diff_named_ref_targets(base.tags(), other.tags());
        for (name, (base_target, other_target)) in changed_tags {
            self.merge_tag(name, base_target, other_target);
        }

        let changed_git_refs = diff_named_ref_targets(base.git_refs(), other.git_refs());
        for (name, (base_target, other_target)) in changed_git_refs {
            self.merge_git_ref(name, base_target, other_target);
        }

        let changed_remote_bookmarks =
            diff_named_remote_refs(base.all_remote_bookmarks(), other.all_remote_bookmarks());
        for (symbol, (base_ref, other_ref)) in changed_remote_bookmarks {
            self.merge_remote_bookmark(symbol, base_ref, other_ref);
        }

        let new_git_head_target = merge_ref_targets(
            self.index(),
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        );
        self.set_git_head_target(new_git_head_target);

        Ok(())
    }

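    /// Finds and records commits that were rewritten, divergently rewritten,
    /// or abandoned between `old_heads` and `new_heads` by comparing the
    /// change ids of commits reachable from only one side.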
    fn record_rewrites(
        &mut self,
        old_heads: &[CommitId],
        new_heads: &[CommitId],
    ) -> BackendResult<()> {
        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, old_heads, new_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            removed_changes
                .entry(change_id)
                .or_default()
                .push(commit_id);
        }
        if removed_changes.is_empty() {
            return Ok(());
        }

        let mut rewritten_changes = HashSet::new();
        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for item in revset::walk_revs(self, new_heads, old_heads)
            .map_err(|err| err.into_backend_error())?
            .commit_change_ids()
        {
            let (commit_id, change_id) = item.map_err(|err| err.into_backend_error())?;
            if let Some(old_commits) = removed_changes.get(&change_id) {
                for old_commit in old_commits {
                    rewritten_commits
                        .entry(old_commit.clone())
                        .or_default()
                        .push(commit_id.clone());
                }
            }
            rewritten_changes.insert(change_id);
        }
        for (old_commit, new_commits) in rewritten_commits {
            if new_commits.len() == 1 {
                self.set_rewritten_commit(
                    old_commit.clone(),
                    new_commits.into_iter().next().unwrap(),
                );
            } else {
                self.set_divergent_rewrite(old_commit.clone(), new_commits);
            }
        }

        for (change_id, removed_commit_ids) in &removed_changes {
            if !rewritten_changes.contains(change_id) {
                for id in removed_commit_ids {
                    let commit = self.store().get_commit(id)?;
                    self.record_abandoned_commit(&commit);
                }
            }
        }

        Ok(())
    }
}

impl Repo for MutableRepo {
    fn base_repo(&self) -> &ReadonlyRepo {
        &self.base_repo
    }

    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        self.index.as_index()
    }

    fn view(&self) -> &View {
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn submodule_store(&self) -> &Arc<dyn SubmoduleStore> {
        self.base_repo.submodule_store()
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<CommitId>> {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.resolve_prefix(prefix)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        let change_id_index = self.index.change_id_index(&mut self.view().heads().iter());
        change_id_index.shortest_unique_prefix_len(target_id)
    }
}

#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;

#[derive(Debug, Error)]
pub enum EditCommitError {
    #[error("Current working-copy commit not found")]
    WorkingCopyCommitNotFound(#[source] BackendError),
    #[error(transparent)]
    RewriteRootCommit(#[from] RewriteRootCommit),
    #[error(transparent)]
    BackendError(#[from] BackendError),
}

#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    #[error("Failed to create new working-copy commit")]
    CreateCommit(#[from] BackendError),
    #[error("Failed to edit commit")]
    EditCommit(#[from] EditCommitError),
}

mod dirty_cell {
    use std::cell::OnceCell;
    use std::cell::RefCell;

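    /// A cell that lazily restores invariants on read: the value lives either
    /// in `clean` (invariants hold) or in `dirty` (invariants may have been
    /// violated by mutation), and readers pass a closure that repairs the
    /// value before it is handed out.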
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        clean: OnceCell<Box<T>>,
        dirty: RefCell<Option<Box<T>>>,
    }

    impl<T> DirtyCell<T> {
        pub fn with_clean(value: T) -> Self {
            DirtyCell {
                clean: OnceCell::from(Box::new(value)),
                dirty: RefCell::new(None),
            }
        }

        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            self.clean.get_or_init(|| {
                let mut value = self.dirty.borrow_mut().take().unwrap();
                f(&mut value);
                value
            })
        }

        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            self.get_or_ensure_clean(f);
        }

        pub fn into_inner(self) -> T {
            *self
                .clean
                .into_inner()
                .or_else(|| self.dirty.into_inner())
                .unwrap()
        }

        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            if let Some(value) = self.clean.get() {
                f(value)
            } else {
                f(self.dirty.borrow().as_ref().unwrap())
            }
        }

        pub fn get_mut(&mut self) -> &mut T {
            self.clean
                .get_mut()
                .or_else(|| self.dirty.get_mut().as_mut())
                .unwrap()
        }

        pub fn mark_dirty(&mut self) {
            if let Some(value) = self.clean.take() {
                *self.dirty.get_mut() = Some(value);
            }
        }
    }
}