jujutsu_lib/
repo.rs

1// Copyright 2020 The Jujutsu Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15use std::collections::{HashMap, HashSet};
16use std::fmt::{Debug, Formatter};
17use std::io::ErrorKind;
18use std::path::{Path, PathBuf};
19use std::sync::Arc;
20use std::{fs, io};
21
22use itertools::Itertools;
23use once_cell::sync::OnceCell;
24use thiserror::Error;
25
26use self::dirty_cell::DirtyCell;
27use crate::backend::{Backend, BackendError, BackendResult, ChangeId, CommitId, ObjectId, TreeId};
28use crate::commit::Commit;
29use crate::commit_builder::CommitBuilder;
30use crate::dag_walk::topo_order_reverse;
31use crate::git_backend::GitBackend;
32use crate::index::{
33    HexPrefix, Index, IndexEntry, IndexPosition, MutableIndex, PrefixResolution, ReadonlyIndex,
34};
35use crate::index_store::IndexStore;
36use crate::local_backend::LocalBackend;
37use crate::op_heads_store::{self, OpHeadResolutionError, OpHeadsStore};
38use crate::op_store::{
39    BranchTarget, OpStore, OperationId, OperationMetadata, RefTarget, WorkspaceId,
40};
41use crate::operation::Operation;
42use crate::refs::merge_ref_targets;
43use crate::rewrite::DescendantRebaser;
44use crate::settings::{RepoSettings, UserSettings};
45use crate::simple_op_heads_store::SimpleOpHeadsStore;
46use crate::simple_op_store::SimpleOpStore;
47use crate::store::Store;
48use crate::transaction::Transaction;
49use crate::view::{RefName, View};
50use crate::{backend, op_store};
51
52pub trait Repo {
53    fn base_repo(&self) -> &Arc<ReadonlyRepo>;
54
55    fn store(&self) -> &Arc<Store>;
56
57    fn op_store(&self) -> &Arc<dyn OpStore>;
58
59    fn index(&self) -> &dyn Index;
60
61    fn view(&self) -> &View;
62
63    fn resolve_change_id(&self, change_id: &ChangeId) -> Option<Vec<IndexEntry>> {
64        // Replace this if we added more efficient lookup method.
65        let prefix = HexPrefix::from_bytes(change_id.as_bytes());
66        match self.resolve_change_id_prefix(&prefix) {
67            PrefixResolution::NoMatch => None,
68            PrefixResolution::SingleMatch(entries) => Some(entries),
69            PrefixResolution::AmbiguousMatch => panic!("complete change_id should be unambiguous"),
70        }
71    }
72
73    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<IndexEntry>>;
74
75    fn shortest_unique_change_id_prefix_len(&self, target_id_bytes: &ChangeId) -> usize;
76}
77
/// A repository loaded at a specific operation, providing an immutable view
/// of its stores, index, and view.
pub struct ReadonlyRepo {
    repo_path: PathBuf,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    // The operation this repo corresponds to; its view is `self.view`.
    operation: Operation,
    settings: RepoSettings,
    index_store: Arc<IndexStore>,
    // Lazily loaded on first access; see `readonly_index()`.
    index: OnceCell<Arc<ReadonlyIndex>>,
    // TODO: This should eventually become part of the index and not be stored fully in memory.
    change_id_index: OnceCell<ChangeIdIndex>,
    view: View,
}
91
92impl Debug for ReadonlyRepo {
93    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
94        f.debug_struct("Repo")
95            .field("repo_path", &self.repo_path)
96            .field("store", &self.store)
97            .finish()
98    }
99}
100
impl ReadonlyRepo {
    /// Returns the factory used by default to create the operation store
    /// (a `SimpleOpStore`) when initializing a new repo.
    pub fn default_op_store_factory() -> impl FnOnce(&Path) -> Box<dyn OpStore> {
        |store_path| Box::new(SimpleOpStore::init(store_path))
    }

    /// Returns the factory used by default to create the op-heads store
    /// (a `SimpleOpHeadsStore`) and the initial operation.
    pub fn default_op_heads_store_factory() -> impl FnOnce(
        &Path,
        &Arc<dyn OpStore>,
        &op_store::View,
        OperationMetadata,
    ) -> (Box<dyn OpHeadsStore>, Operation) {
        |store_path, op_store, view, operation_metadata| {
            let (store, op) =
                SimpleOpHeadsStore::init(store_path, op_store, view, operation_metadata);
            (Box::new(store), op)
        }
    }

    /// Initializes a new repository at `repo_path`, creating the on-disk
    /// layout (`store/`, `op_store/`, `op_heads/`, `index/`) and recording
    /// each store's type in a `type` file for later loading.
    ///
    /// The store factories let callers plug in custom backends. Returns the
    /// repo loaded at the "initialize repo" operation.
    ///
    /// # Errors
    ///
    /// Returns a `PathError` if any directory or file cannot be created.
    pub fn init(
        user_settings: &UserSettings,
        repo_path: &Path,
        backend_factory: impl FnOnce(&Path) -> Box<dyn Backend>,
        op_store_factory: impl FnOnce(&Path) -> Box<dyn OpStore>,
        op_heads_store_factory: impl FnOnce(
            &Path,
            &Arc<dyn OpStore>,
            &op_store::View,
            OperationMetadata,
        ) -> (Box<dyn OpHeadsStore>, Operation),
    ) -> Result<Arc<ReadonlyRepo>, PathError> {
        let repo_path = repo_path.canonicalize().context(repo_path)?;

        // Commit backend: create its directory, then record its type so
        // `StoreFactories::load_backend()` can dispatch on it later.
        let store_path = repo_path.join("store");
        fs::create_dir(&store_path).context(&store_path)?;
        let backend = backend_factory(&store_path);
        let backend_path = store_path.join("type");
        fs::write(&backend_path, backend.name()).context(&backend_path)?;
        let store = Store::new(backend);
        let repo_settings = user_settings.with_repo(&repo_path).unwrap();

        // Operation store, same pattern: directory + `type` file.
        let op_store_path = repo_path.join("op_store");
        fs::create_dir(&op_store_path).context(&op_store_path)?;
        let op_store = op_store_factory(&op_store_path);
        let op_store_type_path = op_store_path.join("type");
        fs::write(&op_store_type_path, op_store.name()).context(&op_store_type_path)?;
        let op_store = Arc::from(op_store);

        // The root view: the root commit is both a head and a public head.
        let mut root_view = op_store::View::default();
        root_view.head_ids.insert(store.root_commit_id().clone());
        root_view
            .public_head_ids
            .insert(store.root_commit_id().clone());

        // Op-heads store; its factory also writes the initial operation.
        let op_heads_path = repo_path.join("op_heads");
        fs::create_dir(&op_heads_path).context(&op_heads_path)?;
        let operation_metadata =
            crate::transaction::create_op_metadata(user_settings, "initialize repo".to_string());
        let (op_heads_store, init_op) =
            op_heads_store_factory(&op_heads_path, &op_store, &root_view, operation_metadata);
        let op_heads_type_path = op_heads_path.join("type");
        fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?;
        let op_heads_store = Arc::from(op_heads_store);

        let index_path = repo_path.join("index");
        fs::create_dir(&index_path).context(&index_path)?;
        let index_store = Arc::new(IndexStore::init(index_path));

        let view = View::new(root_view);
        Ok(Arc::new(ReadonlyRepo {
            repo_path,
            store,
            op_store,
            op_heads_store,
            operation: init_op,
            settings: repo_settings,
            index_store,
            index: OnceCell::new(),
            change_id_index: OnceCell::new(),
            view,
        }))
    }

    /// Loads the repo at `repo_path` at the current operation head(s),
    /// resolving concurrent operations if necessary.
    pub fn load_at_head(
        user_settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Result<Arc<ReadonlyRepo>, OpHeadResolutionError<BackendError>> {
        RepoLoader::init(user_settings, repo_path, store_factories).load_at_head(user_settings)
    }

    /// Returns a `RepoLoader` sharing this repo's stores, for loading the
    /// repo at other operations.
    pub fn loader(&self) -> RepoLoader {
        RepoLoader {
            repo_path: self.repo_path.clone(),
            repo_settings: self.settings.clone(),
            store: self.store.clone(),
            op_store: self.op_store.clone(),
            op_heads_store: self.op_heads_store.clone(),
            index_store: self.index_store.clone(),
        }
    }

    /// The canonicalized path to the repo directory.
    pub fn repo_path(&self) -> &PathBuf {
        &self.repo_path
    }

    /// The id of the operation this repo is loaded at.
    pub fn op_id(&self) -> &OperationId {
        self.operation.id()
    }

    /// The operation this repo is loaded at.
    pub fn operation(&self) -> &Operation {
        &self.operation
    }

    pub fn view(&self) -> &View {
        &self.view
    }

    /// Returns the readonly index, loading (and possibly building) it on
    /// first access.
    pub fn readonly_index(&self) -> &Arc<ReadonlyIndex> {
        self.index.get_or_init(|| {
            self.index_store
                .get_index_at_op(&self.operation, &self.store)
        })
    }

    // Builds the in-memory change-id index lazily by walking all revisions
    // reachable from the view's heads.
    fn change_id_index(&self) -> &ChangeIdIndex {
        self.change_id_index.get_or_init(|| {
            let heads = self.view().heads().iter().cloned().collect_vec();
            let walk = self.readonly_index().walk_revs(&heads, &[]);
            IdIndex::from_vec(
                walk.map(|entry| (entry.change_id(), entry.position()))
                    .collect(),
            )
        })
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    pub fn index_store(&self) -> &Arc<IndexStore> {
        &self.index_store
    }

    pub fn settings(&self) -> &RepoSettings {
        &self.settings
    }

    /// Starts a transaction based on this repo. Changes are applied to a
    /// `MutableRepo` and committed when the transaction is written.
    pub fn start_transaction(
        self: &Arc<ReadonlyRepo>,
        user_settings: &UserSettings,
        description: &str,
    ) -> Transaction {
        let mut_repo = MutableRepo::new(self.clone(), self.readonly_index().clone(), &self.view);
        Transaction::new(mut_repo, user_settings, description)
    }

    /// Reloads the repo at the current operation head(s), resolving
    /// concurrent operations if necessary.
    pub fn reload_at_head(
        &self,
        user_settings: &UserSettings,
    ) -> Result<Arc<ReadonlyRepo>, OpHeadResolutionError<BackendError>> {
        self.loader().load_at_head(user_settings)
    }

    /// Reloads the repo at the given (existing) operation.
    pub fn reload_at(&self, operation: &Operation) -> Arc<ReadonlyRepo> {
        self.loader().load_at(operation)
    }
}
268
269impl Repo for Arc<ReadonlyRepo> {
270    fn base_repo(&self) -> &Arc<ReadonlyRepo> {
271        self
272    }
273
274    fn store(&self) -> &Arc<Store> {
275        &self.store
276    }
277
278    fn op_store(&self) -> &Arc<dyn OpStore> {
279        &self.op_store
280    }
281
282    fn index(&self) -> &dyn Index {
283        self.readonly_index().as_ref()
284    }
285
286    fn view(&self) -> &View {
287        &self.view
288    }
289
290    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<IndexEntry>> {
291        let index = self.index();
292        self.change_id_index()
293            .resolve_prefix_with(prefix, |&pos| index.entry_by_pos(pos))
294    }
295
296    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
297        self.change_id_index().shortest_unique_prefix_len(target_id)
298    }
299}
300
// Factory closures used by `StoreFactories` to load each pluggable store
// kind from its directory on disk.
type BackendFactory = Box<dyn Fn(&Path) -> Box<dyn Backend>>;
type OpStoreFactory = Box<dyn Fn(&Path) -> Box<dyn OpStore>>;
type OpHeadsStoreFactory = Box<dyn Fn(&Path) -> Box<dyn OpHeadsStore>>;
304
/// Registry of store implementations, keyed by the name recorded in each
/// store directory's `type` file. Used when loading an existing repo.
pub struct StoreFactories {
    backend_factories: HashMap<String, BackendFactory>,
    op_store_factories: HashMap<String, OpStoreFactory>,
    op_heads_store_factories: HashMap<String, OpHeadsStoreFactory>,
}
310
311impl Default for StoreFactories {
312    fn default() -> Self {
313        let mut factories = StoreFactories::empty();
314
315        // Backends
316        factories.add_backend(
317            "local",
318            Box::new(|store_path| Box::new(LocalBackend::load(store_path))),
319        );
320        factories.add_backend(
321            "git",
322            Box::new(|store_path| Box::new(GitBackend::load(store_path))),
323        );
324
325        // OpStores
326        factories.add_op_store(
327            "simple_op_store",
328            Box::new(|store_path| Box::new(SimpleOpStore::load(store_path))),
329        );
330
331        // OpHeadsStores
332        factories.add_op_heads_store(
333            "simple_op_heads_store",
334            Box::new(|store_path| Box::new(SimpleOpHeadsStore::load(store_path))),
335        );
336
337        factories
338    }
339}
340
341impl StoreFactories {
342    pub fn empty() -> Self {
343        StoreFactories {
344            backend_factories: HashMap::new(),
345            op_store_factories: HashMap::new(),
346            op_heads_store_factories: HashMap::new(),
347        }
348    }
349
350    pub fn add_backend(&mut self, name: &str, factory: BackendFactory) {
351        self.backend_factories.insert(name.to_string(), factory);
352    }
353
354    pub fn load_backend(&self, store_path: &Path) -> Box<dyn Backend> {
355        // For compatibility with existing repos. TODO: Delete in 0.8+.
356        if store_path.join("backend").is_file() {
357            fs::rename(store_path.join("backend"), store_path.join("type"))
358                .expect("Failed to rename 'backend' file to 'type'");
359        }
360        let backend_type = match fs::read_to_string(store_path.join("type")) {
361            Ok(content) => content,
362            Err(err) if err.kind() == ErrorKind::NotFound => {
363                // For compatibility with existing repos. TODO: Delete in 0.8+.
364                let inferred_type = if store_path.join("git_target").is_file() {
365                    String::from("git")
366                } else {
367                    String::from("local")
368                };
369                fs::write(store_path.join("type"), &inferred_type).unwrap();
370                inferred_type
371            }
372            Err(_) => {
373                panic!("Failed to read backend type");
374            }
375        };
376        let backend_factory = self
377            .backend_factories
378            .get(&backend_type)
379            .expect("Unexpected backend type");
380        backend_factory(store_path)
381    }
382
383    pub fn add_op_store(&mut self, name: &str, factory: OpStoreFactory) {
384        self.op_store_factories.insert(name.to_string(), factory);
385    }
386
387    pub fn load_op_store(&self, store_path: &Path) -> Box<dyn OpStore> {
388        let op_store_type = match fs::read_to_string(store_path.join("type")) {
389            Ok(content) => content,
390            Err(err) if err.kind() == ErrorKind::NotFound => {
391                // For compatibility with existing repos. TODO: Delete in 0.8+
392                let default_type = String::from("simple_op_store");
393                fs::write(store_path.join("type"), &default_type).unwrap();
394                default_type
395            }
396            Err(_) => {
397                panic!("Failed to read op_store type");
398            }
399        };
400        let op_store_factory = self
401            .op_store_factories
402            .get(&op_store_type)
403            .expect("Unexpected op_store type");
404        op_store_factory(store_path)
405    }
406
407    pub fn add_op_heads_store(&mut self, name: &str, factory: OpHeadsStoreFactory) {
408        self.op_heads_store_factories
409            .insert(name.to_string(), factory);
410    }
411
412    pub fn load_op_heads_store(&self, store_path: &Path) -> Box<dyn OpHeadsStore> {
413        let op_heads_store_type = match fs::read_to_string(store_path.join("type")) {
414            Ok(content) => content,
415            Err(err) if err.kind() == ErrorKind::NotFound => {
416                // For compatibility with existing repos. TODO: Delete in 0.8+
417                let default_type = String::from("simple_op_heads_store");
418                fs::write(store_path.join("type"), &default_type).unwrap();
419                default_type
420            }
421            Err(_) => {
422                panic!("Failed to read op_heads_store type");
423            }
424        };
425        let op_heads_store_factory = self
426            .op_heads_store_factories
427            .get(&op_heads_store_type)
428            .expect("Unexpected op_heads_store type");
429        op_heads_store_factory(store_path)
430    }
431}
432
/// Holds the loaded stores for a repo so the repo can be (re)loaded cheaply
/// at different operations without re-reading store metadata from disk.
#[derive(Clone)]
pub struct RepoLoader {
    repo_path: PathBuf,
    repo_settings: RepoSettings,
    store: Arc<Store>,
    op_store: Arc<dyn OpStore>,
    op_heads_store: Arc<dyn OpHeadsStore>,
    index_store: Arc<IndexStore>,
}
442
impl RepoLoader {
    /// Loads all stores for the repo at `repo_path` using the registered
    /// `store_factories`, dispatching on each store's `type` file.
    pub fn init(
        user_settings: &UserSettings,
        repo_path: &Path,
        store_factories: &StoreFactories,
    ) -> Self {
        let store = Store::new(store_factories.load_backend(&repo_path.join("store")));
        let repo_settings = user_settings.with_repo(repo_path).unwrap();
        let op_store = Arc::from(store_factories.load_op_store(&repo_path.join("op_store")));
        let op_heads_store =
            Arc::from(store_factories.load_op_heads_store(&repo_path.join("op_heads")));
        let index_store = Arc::new(IndexStore::load(repo_path.join("index")));
        Self {
            repo_path: repo_path.to_path_buf(),
            repo_settings,
            store,
            op_store,
            op_heads_store,
            index_store,
        }
    }

    pub fn repo_path(&self) -> &PathBuf {
        &self.repo_path
    }

    pub fn store(&self) -> &Arc<Store> {
        &self.store
    }

    pub fn index_store(&self) -> &Arc<IndexStore> {
        &self.index_store
    }

    pub fn op_store(&self) -> &Arc<dyn OpStore> {
        &self.op_store
    }

    pub fn op_heads_store(&self) -> &Arc<dyn OpHeadsStore> {
        &self.op_heads_store
    }

    /// Loads the repo at the current operation head. If there are multiple
    /// op heads (concurrent operations), they are merged into a new
    /// operation first; see `_resolve_op_heads()`.
    pub fn load_at_head(
        &self,
        user_settings: &UserSettings,
    ) -> Result<Arc<ReadonlyRepo>, OpHeadResolutionError<BackendError>> {
        let op = op_heads_store::resolve_op_heads(
            self.op_heads_store.as_ref(),
            &self.op_store,
            |op_heads| self._resolve_op_heads(op_heads, user_settings),
        )?;
        let view = View::new(op.view().take_store_view());
        Ok(self._finish_load(op, view))
    }

    /// Loads the repo at the given (existing) operation.
    pub fn load_at(&self, op: &Operation) -> Arc<ReadonlyRepo> {
        let view = View::new(op.view().take_store_view());
        self._finish_load(op.clone(), view)
    }

    /// Builds a `ReadonlyRepo` from already-known parts, reusing the given
    /// `index` instead of loading it lazily.
    pub fn create_from(
        &self,
        operation: Operation,
        view: View,
        index: Arc<ReadonlyIndex>,
    ) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            repo_path: self.repo_path.clone(),
            store: self.store.clone(),
            op_store: self.op_store.clone(),
            op_heads_store: self.op_heads_store.clone(),
            operation,
            settings: self.repo_settings.clone(),
            index_store: self.index_store.clone(),
            index: OnceCell::with_value(index),
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }

    // Merges concurrent operation heads into a single new operation by
    // merging each subsequent head into a transaction based on the first,
    // rebasing descendants as needed. The merged operation is left
    // unpublished; the caller publishes it.
    fn _resolve_op_heads(
        &self,
        op_heads: Vec<Operation>,
        user_settings: &UserSettings,
    ) -> Result<Operation, BackendError> {
        let base_repo = self.load_at(&op_heads[0]);
        let mut tx = base_repo.start_transaction(user_settings, "resolve concurrent operations");
        for other_op_head in op_heads.into_iter().skip(1) {
            tx.merge_operation(other_op_head);
            tx.mut_repo().rebase_descendants(user_settings)?;
        }
        let merged_repo = tx.write().leave_unpublished();
        Ok(merged_repo.operation().clone())
    }

    // Assembles a `ReadonlyRepo` with a lazily-loaded index.
    fn _finish_load(&self, operation: Operation, view: View) -> Arc<ReadonlyRepo> {
        let repo = ReadonlyRepo {
            repo_path: self.repo_path.clone(),
            store: self.store.clone(),
            op_store: self.op_store.clone(),
            op_heads_store: self.op_heads_store.clone(),
            operation,
            settings: self.repo_settings.clone(),
            index_store: self.index_store.clone(),
            index: OnceCell::new(),
            change_id_index: OnceCell::new(),
            view,
        };
        Arc::new(repo)
    }
}
555
/// A mutable working copy of a repo, used within a transaction. Tracks
/// rewritten/abandoned commits so descendants can be rebased before commit.
pub struct MutableRepo {
    base_repo: Arc<ReadonlyRepo>,
    index: MutableIndex,
    // The view, lazily re-normalized; see `enforce_view_invariants()`.
    view: DirtyCell<View>,
    // Maps old commit id -> new commit id(s) for commits rewritten in this
    // transaction; consumed by `rebase_descendants()`.
    rewritten_commits: HashMap<CommitId, HashSet<CommitId>>,
    abandoned_commits: HashSet<CommitId>,
}
563
564impl MutableRepo {
565    pub fn new(
566        base_repo: Arc<ReadonlyRepo>,
567        index: Arc<ReadonlyIndex>,
568        view: &View,
569    ) -> MutableRepo {
570        let mut_view = view.clone();
571        let mut_index = MutableIndex::incremental(index);
572        MutableRepo {
573            base_repo,
574            index: mut_index,
575            view: DirtyCell::with_clean(mut_view),
576            rewritten_commits: Default::default(),
577            abandoned_commits: Default::default(),
578        }
579    }
580
    fn view_mut(&mut self) -> &mut View {
        self.view.get_mut()
    }

    /// Returns true if this mutable repo differs from its base: any commit
    /// was rewritten or abandoned, or the view changed.
    pub fn has_changes(&self) -> bool {
        !(self.abandoned_commits.is_empty()
            && self.rewritten_commits.is_empty()
            && self.view() == &self.base_repo.view)
    }

    /// Consumes the mutable repo, returning the updated index and (cleaned)
    /// view for the transaction to commit.
    pub fn consume(self) -> (MutableIndex, View) {
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        (self.index, self.view.into_inner())
    }
595
    /// Returns a builder for a new commit with the given parents and tree.
    pub fn new_commit(
        &mut self,
        settings: &UserSettings,
        parents: Vec<CommitId>,
        tree_id: TreeId,
    ) -> CommitBuilder {
        CommitBuilder::for_new_commit(self, settings, parents, tree_id)
    }

    /// Returns a builder for a commit that rewrites `predecessor`.
    pub fn rewrite_commit(
        &mut self,
        settings: &UserSettings,
        predecessor: &Commit,
    ) -> CommitBuilder {
        CommitBuilder::for_rewrite_from(self, settings, predecessor)
    }

    /// Writes the commit to the store and adds it as a head in the view.
    pub fn write_commit(&mut self, commit: backend::Commit) -> BackendResult<Commit> {
        let commit = self.store().write_commit(commit)?;
        self.add_head(&commit);
        Ok(commit)
    }
618
619    /// Record a commit as having been rewritten in this transaction. This
620    /// record is used by `rebase_descendants()`.
621    ///
622    /// Rewritten commits don't have to be recorded here. This is just a
623    /// convenient place to record it. It won't matter after the transaction
624    /// has been committed.
625    pub fn record_rewritten_commit(&mut self, old_id: CommitId, new_id: CommitId) {
626        assert_ne!(old_id, *self.store().root_commit_id());
627        self.rewritten_commits
628            .entry(old_id)
629            .or_default()
630            .insert(new_id);
631    }
632
    /// Forgets all recorded rewrites.
    pub fn clear_rewritten_commits(&mut self) {
        self.rewritten_commits.clear();
    }

    /// Record a commit as having been abandoned in this transaction. This
    /// record is used by `rebase_descendants()`.
    ///
    /// Abandoned commits don't have to be recorded here. This is just a
    /// convenient place to record it. It won't matter after the transaction
    /// has been committed.
    pub fn record_abandoned_commit(&mut self, old_id: CommitId) {
        // The root commit can never be abandoned.
        assert_ne!(old_id, *self.store().root_commit_id());
        self.abandoned_commits.insert(old_id);
    }

    /// Forgets all recorded abandoned commits.
    pub fn clear_abandoned_commits(&mut self) {
        self.abandoned_commits.clear();
    }

    /// Returns true if any rewrites or abandonments have been recorded.
    pub fn has_rewrites(&self) -> bool {
        !(self.rewritten_commits.is_empty() && self.abandoned_commits.is_empty())
    }
655
    /// Creates a `DescendantRebaser` to rebase descendants of the recorded
    /// rewritten and abandoned commits.
    ///
    /// The recorded maps are cloned, so records added afterwards are not
    /// seen by the returned rebaser.
    pub fn create_descendant_rebaser<'settings, 'repo>(
        &'repo mut self,
        settings: &'settings UserSettings,
    ) -> DescendantRebaser<'settings, 'repo> {
        DescendantRebaser::new(
            settings,
            self,
            self.rewritten_commits.clone(),
            self.abandoned_commits.clone(),
        )
    }
669
670    pub fn rebase_descendants(&mut self, settings: &UserSettings) -> Result<usize, BackendError> {
671        if !self.has_rewrites() {
672            // Optimization
673            return Ok(0);
674        }
675        let mut rebaser = self.create_descendant_rebaser(settings);
676        rebaser.rebase_all()?;
677        Ok(rebaser.rebased().len())
678    }
679
680    pub fn set_wc_commit(
681        &mut self,
682        workspace_id: WorkspaceId,
683        commit_id: CommitId,
684    ) -> Result<(), RewriteRootCommit> {
685        if &commit_id == self.store().root_commit_id() {
686            return Err(RewriteRootCommit);
687        }
688        self.view_mut().set_wc_commit(workspace_id, commit_id);
689        Ok(())
690    }
691
692    pub fn remove_wc_commit(&mut self, workspace_id: &WorkspaceId) {
693        self.view_mut().remove_wc_commit(workspace_id);
694    }
695
696    pub fn check_out(
697        &mut self,
698        workspace_id: WorkspaceId,
699        settings: &UserSettings,
700        commit: &Commit,
701    ) -> Result<Commit, CheckOutCommitError> {
702        let wc_commit = self
703            .new_commit(
704                settings,
705                vec![commit.id().clone()],
706                commit.tree_id().clone(),
707            )
708            .write()?;
709        self.edit(workspace_id, &wc_commit)?;
710        Ok(wc_commit)
711    }
712
    /// Makes `commit` the working-copy commit of the given workspace.
    ///
    /// If the workspace's previous working-copy commit is empty (same tree
    /// as its single parent), has no description, and is still a head, it is
    /// abandoned so it doesn't linger in the history.
    ///
    /// # Errors
    ///
    /// Fails if the previous working-copy commit cannot be read, or if
    /// `commit` is the root commit.
    pub fn edit(
        &mut self,
        workspace_id: WorkspaceId,
        commit: &Commit,
    ) -> Result<(), EditCommitError> {
        let maybe_wc_commit_id = self
            .view
            .with_ref(|v| v.get_wc_commit_id(&workspace_id).cloned());
        if let Some(wc_commit_id) = maybe_wc_commit_id {
            let wc_commit = self
                .store()
                .get_commit(&wc_commit_id)
                .map_err(EditCommitError::WorkingCopyCommitNotFound)?;
            if wc_commit.parent_ids().len() == 1
                && wc_commit.parents()[0].tree_id() == wc_commit.tree_id()
                && wc_commit.description().is_empty()
                && self.view().heads().contains(wc_commit.id())
            {
                // Abandon the working-copy commit we're leaving if it's empty and a head commit
                self.record_abandoned_commit(wc_commit_id);
            }
        }
        self.set_wc_commit(workspace_id, commit.id().clone())
            .map_err(|RewriteRootCommit| EditCommitError::RewriteRootCommit)
    }
738
    /// Normalizes the view's head sets using the index: keeps only actual
    /// heads (commits not reachable from others in the set) and makes sure
    /// every public head is also in the regular head set.
    fn enforce_view_invariants(&self, view: &mut View) {
        let view = view.store_view_mut();
        // Reduce public heads to the actual heads among them.
        view.public_head_ids = self
            .index
            .heads(&mut view.public_head_ids.iter())
            .iter()
            .cloned()
            .collect();
        // Public heads must also be (regular) heads; add them before
        // reducing the head set the same way.
        view.head_ids.extend(view.public_head_ids.iter().cloned());
        view.head_ids = self
            .index
            .heads(&mut view.head_ids.iter())
            .iter()
            .cloned()
            .collect();
    }
755
    /// Adds `head` (and any ancestors missing from the index) as a head of
    /// the view, updating the index accordingly.
    pub fn add_head(&mut self, head: &Commit) {
        let current_heads = self.view.get_mut().heads();
        // Use incremental update for common case of adding a single commit on top a
        // current head. TODO: Also use incremental update when adding a single
        // commit on top a non-head.
        if head
            .parent_ids()
            .iter()
            .all(|parent_id| current_heads.contains(parent_id))
        {
            self.index.add_commit(head);
            self.view.get_mut().add_head(head.id());
            // The new head supersedes its parents as heads.
            for parent_id in head.parent_ids() {
                self.view.get_mut().remove_head(parent_id);
            }
        } else {
            // General case: walk back to all ancestors not yet in the index
            // and add them in topological order (parents before children).
            let missing_commits = topo_order_reverse(
                vec![head.clone()],
                Box::new(|commit: &Commit| commit.id().clone()),
                Box::new(|commit: &Commit| -> Vec<Commit> {
                    commit
                        .parents()
                        .into_iter()
                        .filter(|parent| !self.index.has_id(parent.id()))
                        .collect()
                }),
            );
            for missing_commit in missing_commits.iter().rev() {
                self.index.add_commit(missing_commit);
            }
            self.view.get_mut().add_head(head.id());
            // Let `enforce_view_invariants()` remove non-head ancestors.
            self.view.mark_dirty();
        }
    }
790
    /// Removes `head` from the view's heads. The view is marked dirty so
    /// invariants are re-established before the transaction commits.
    pub fn remove_head(&mut self, head: &CommitId) {
        self.view_mut().remove_head(head);
        self.view.mark_dirty();
    }

    /// Adds `head` to the view's public heads.
    pub fn add_public_head(&mut self, head: &Commit) {
        self.view_mut().add_public_head(head.id());
        self.view.mark_dirty();
    }

    /// Removes `head` from the view's public heads.
    pub fn remove_public_head(&mut self, head: &CommitId) {
        self.view_mut().remove_public_head(head);
        self.view.mark_dirty();
    }
805
    // Accessors and mutators for named refs (branches, tags, git refs) in
    // the view. These delegate straight to the underlying `View`.

    pub fn get_branch(&self, name: &str) -> Option<BranchTarget> {
        self.view.with_ref(|v| v.get_branch(name).cloned())
    }

    pub fn set_branch(&mut self, name: String, target: BranchTarget) {
        self.view_mut().set_branch(name, target);
    }

    pub fn remove_branch(&mut self, name: &str) {
        self.view_mut().remove_branch(name);
    }

    pub fn get_local_branch(&self, name: &str) -> Option<RefTarget> {
        self.view.with_ref(|v| v.get_local_branch(name))
    }

    pub fn set_local_branch(&mut self, name: String, target: RefTarget) {
        self.view_mut().set_local_branch(name, target);
    }

    pub fn remove_local_branch(&mut self, name: &str) {
        self.view_mut().remove_local_branch(name);
    }

    pub fn get_remote_branch(&self, name: &str, remote_name: &str) -> Option<RefTarget> {
        self.view
            .with_ref(|v| v.get_remote_branch(name, remote_name))
    }

    pub fn set_remote_branch(&mut self, name: String, remote_name: String, target: RefTarget) {
        self.view_mut().set_remote_branch(name, remote_name, target);
    }

    pub fn remove_remote_branch(&mut self, name: &str, remote_name: &str) {
        self.view_mut().remove_remote_branch(name, remote_name);
    }

    /// Renames remote `old` to `new`, moving all its remote branches.
    pub fn rename_remote(&mut self, old: &str, new: &str) {
        self.view_mut().rename_remote(old, new);
    }

    pub fn get_tag(&self, name: &str) -> Option<RefTarget> {
        self.view.with_ref(|v| v.get_tag(name))
    }

    pub fn set_tag(&mut self, name: String, target: RefTarget) {
        self.view_mut().set_tag(name, target);
    }

    pub fn remove_tag(&mut self, name: &str) {
        self.view_mut().remove_tag(name);
    }

    pub fn get_git_ref(&self, name: &str) -> Option<RefTarget> {
        self.view.with_ref(|v| v.get_git_ref(name))
    }

    pub fn set_git_ref(&mut self, name: String, target: RefTarget) {
        self.view_mut().set_git_ref(name, target);
    }

    pub fn remove_git_ref(&mut self, name: &str) {
        self.view_mut().remove_git_ref(name);
    }

    pub fn set_git_head(&mut self, target: RefTarget) {
        self.view_mut().set_git_head(target);
    }

    pub fn clear_git_head(&mut self) {
        self.view_mut().clear_git_head();
    }

    /// Replaces the entire view with `data`. The view is marked dirty so
    /// invariants are re-established before the transaction commits.
    pub fn set_view(&mut self, data: op_store::View) {
        self.view_mut().set_view(data);
        self.view.mark_dirty();
    }
883
    /// Merges `base_repo` and `other_repo` into this repo: indexes first,
    /// then a 3-way merge of the views using `base_repo`'s view as the base.
    pub fn merge(&mut self, base_repo: &ReadonlyRepo, other_repo: &ReadonlyRepo) {
        // First, merge the index, so we can take advantage of a valid index when
        // merging the view. Merging in base_repo's index isn't typically
        // necessary, but it can be if base_repo is ahead of either self or other_repo
        // (e.g. because we're undoing an operation that hasn't been published).
        self.index.merge_in(base_repo.readonly_index());
        self.index.merge_in(other_repo.readonly_index());

        // Clean the view before merging into it, then mark it dirty again so
        // invariants are re-enforced on the next read.
        self.view.ensure_clean(|v| self.enforce_view_invariants(v));
        self.merge_view(&base_repo.view, &other_repo.view);
        self.view.mark_dirty();
    }
896
    /// Three-way merges `other`'s view changes (relative to `base`) into this
    /// repo's view: working-copy commits, public heads, heads (recording
    /// rewrites/abandons), branches, tags, git refs, and git HEAD.
    fn merge_view(&mut self, base: &View, other: &View) {
        // Merge working-copy commits. If there's a conflict, we keep the self side.
        for (workspace_id, base_wc_commit) in base.wc_commit_ids() {
            let self_wc_commit = self.view().get_wc_commit_id(workspace_id);
            let other_wc_commit = other.get_wc_commit_id(workspace_id);
            if other_wc_commit == Some(base_wc_commit) || other_wc_commit == self_wc_commit {
                // The other side didn't change or both sides changed in the
                // same way.
            } else if let Some(other_wc_commit) = other_wc_commit {
                if self_wc_commit == Some(base_wc_commit) {
                    self.view_mut()
                        .set_wc_commit(workspace_id.clone(), other_wc_commit.clone());
                }
            } else {
                // The other side removed the workspace. We want to remove it even if the self
                // side changed the working-copy commit.
                self.view_mut().remove_wc_commit(workspace_id);
            }
        }
        for (workspace_id, other_wc_commit) in other.wc_commit_ids() {
            if self.view().get_wc_commit_id(workspace_id).is_none()
                && base.get_wc_commit_id(workspace_id).is_none()
            {
                // The other side added the workspace.
                self.view_mut()
                    .set_wc_commit(workspace_id.clone(), other_wc_commit.clone());
            }
        }

        // Merge public heads as simple set differences relative to the base.
        for removed_head in base.public_heads().difference(other.public_heads()) {
            self.view_mut().remove_public_head(removed_head);
        }
        for added_head in other.public_heads().difference(base.public_heads()) {
            self.view_mut().add_public_head(added_head);
        }

        // Record commits rewritten or abandoned between `base` and each side
        // before adding the heads `other` introduced.
        let base_heads = base.heads().iter().cloned().collect_vec();
        let own_heads = self.view().heads().iter().cloned().collect_vec();
        let other_heads = other.heads().iter().cloned().collect_vec();
        self.record_rewrites(&base_heads, &own_heads);
        self.record_rewrites(&base_heads, &other_heads);
        // No need to remove heads removed by `other` because we already marked them
        // abandoned or rewritten.
        for added_head in other.heads().difference(base.heads()) {
            self.view_mut().add_head(added_head);
        }

        // Collect every ref name that may have changed on either side, then
        // merge each collected ref individually below.
        let mut maybe_changed_ref_names = HashSet::new();

        let base_branches: HashSet<_> = base.branches().keys().cloned().collect();
        let other_branches: HashSet<_> = other.branches().keys().cloned().collect();
        for branch_name in base_branches.union(&other_branches) {
            let base_branch = base.branches().get(branch_name);
            let other_branch = other.branches().get(branch_name);
            if other_branch == base_branch {
                // Unchanged on other side
                continue;
            }

            // The local branch and every remote-tracking ref seen on either
            // side are all merge candidates.
            maybe_changed_ref_names.insert(RefName::LocalBranch(branch_name.clone()));
            if let Some(branch) = base_branch {
                for remote in branch.remote_targets.keys() {
                    maybe_changed_ref_names.insert(RefName::RemoteBranch {
                        branch: branch_name.clone(),
                        remote: remote.clone(),
                    });
                }
            }
            if let Some(branch) = other_branch {
                for remote in branch.remote_targets.keys() {
                    maybe_changed_ref_names.insert(RefName::RemoteBranch {
                        branch: branch_name.clone(),
                        remote: remote.clone(),
                    });
                }
            }
        }

        // All tags and git refs present on either side are candidates (no
        // change detection here, unlike branches above).
        for tag_name in base.tags().keys() {
            maybe_changed_ref_names.insert(RefName::Tag(tag_name.clone()));
        }
        for tag_name in other.tags().keys() {
            maybe_changed_ref_names.insert(RefName::Tag(tag_name.clone()));
        }

        for git_ref_name in base.git_refs().keys() {
            maybe_changed_ref_names.insert(RefName::GitRef(git_ref_name.clone()));
        }
        for git_ref_name in other.git_refs().keys() {
            maybe_changed_ref_names.insert(RefName::GitRef(git_ref_name.clone()));
        }

        for ref_name in maybe_changed_ref_names {
            let base_target = base.get_ref(&ref_name);
            let other_target = other.get_ref(&ref_name);
            self.view.get_mut().merge_single_ref(
                &self.index,
                &ref_name,
                base_target.as_ref(),
                other_target.as_ref(),
            );
        }

        // Merge git HEAD like a ref: a merged target updates it, an
        // unresolvable merge clears it.
        if let Some(new_git_head) = merge_ref_targets(
            &self.index,
            self.view().git_head(),
            base.git_head(),
            other.git_head(),
        ) {
            self.set_git_head(new_git_head);
        } else {
            self.clear_git_head();
        }
    }
1011
1012    /// Finds and records commits that were rewritten or abandoned between
1013    /// `old_heads` and `new_heads`.
1014    fn record_rewrites(&mut self, old_heads: &[CommitId], new_heads: &[CommitId]) {
1015        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
1016        for removed in self.index.walk_revs(old_heads, new_heads) {
1017            removed_changes
1018                .entry(removed.change_id())
1019                .or_default()
1020                .push(removed.commit_id());
1021        }
1022        if removed_changes.is_empty() {
1023            return;
1024        }
1025
1026        let mut rewritten_changes = HashSet::new();
1027        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
1028        for added in self.index.walk_revs(new_heads, old_heads) {
1029            let change_id = added.change_id();
1030            if let Some(old_commits) = removed_changes.get(&change_id) {
1031                for old_commit in old_commits {
1032                    rewritten_commits
1033                        .entry(old_commit.clone())
1034                        .or_default()
1035                        .push(added.commit_id());
1036                }
1037            }
1038            rewritten_changes.insert(change_id);
1039        }
1040        for (old_commit, new_commits) in rewritten_commits {
1041            for new_commit in new_commits {
1042                self.record_rewritten_commit(old_commit.clone(), new_commit);
1043            }
1044        }
1045
1046        for (change_id, removed_commit_ids) in &removed_changes {
1047            if !rewritten_changes.contains(change_id) {
1048                for removed_commit_id in removed_commit_ids {
1049                    self.record_abandoned_commit(removed_commit_id.clone());
1050                }
1051            }
1052        }
1053    }
1054
    /// Three-way merges `other_target` (relative to `base_target`) into this
    /// view's current value for `ref_name`, passing the index along for the
    /// merge.
    pub fn merge_single_ref(
        &mut self,
        ref_name: &RefName,
        base_target: Option<&RefTarget>,
        other_target: Option<&RefTarget>,
    ) {
        self.view
            .get_mut()
            .merge_single_ref(&self.index, ref_name, base_target, other_target);
    }
1065}
1066
impl Repo for MutableRepo {
    fn base_repo(&self) -> &Arc<ReadonlyRepo> {
        &self.base_repo
    }

    // Store accessors delegate to the base (readonly) repo.
    fn store(&self) -> &Arc<Store> {
        self.base_repo.store()
    }

    fn op_store(&self) -> &Arc<dyn OpStore> {
        self.base_repo.op_store()
    }

    fn index(&self) -> &dyn Index {
        &self.index
    }

    fn view(&self) -> &View {
        // Lazily re-enforces view invariants if the view was marked dirty.
        self.view
            .get_or_ensure_clean(|v| self.enforce_view_invariants(v))
    }

    fn resolve_change_id_prefix(&self, prefix: &HexPrefix) -> PrefixResolution<Vec<IndexEntry>> {
        // TODO: Create a persistent lookup from change id to (visible?) commit ids.
        // Linear scan over all commits reachable from the visible heads,
        // collecting every entry whose change id matches the prefix. Two
        // distinct matching change ids make the prefix ambiguous.
        let mut found_change_id = None;
        let mut found_entries = vec![];
        let heads = self.view().heads().iter().cloned().collect_vec();
        for entry in self.index().walk_revs(&heads, &[]) {
            let change_id = entry.change_id();
            if prefix.matches(&change_id) {
                if let Some(previous_change_id) = found_change_id.replace(change_id.clone()) {
                    if previous_change_id != change_id {
                        return PrefixResolution::AmbiguousMatch;
                    }
                }
                found_entries.push(entry);
            }
        }
        if found_change_id.is_none() {
            return PrefixResolution::NoMatch;
        }
        PrefixResolution::SingleMatch(found_entries)
    }

    fn shortest_unique_change_id_prefix_len(&self, target_id: &ChangeId) -> usize {
        // TODO: implement a real shortest-prefix computation. For now, claim
        // the full hex length (2 digits per byte) is needed, which is always
        // unambiguous.
        target_id.as_bytes().len() * 2
    }
}
1115
/// Error from attempts to check out the root commit for editing
///
/// Returned when an operation would rewrite the root commit, which is never
/// allowed.
#[derive(Debug, Error)]
#[error("Cannot rewrite the root commit")]
pub struct RewriteRootCommit;
1120
/// Error from attempts to edit a commit
#[derive(Debug, Error)]
pub enum EditCommitError {
    /// The commit recorded as the current working-copy commit could not be
    /// loaded from the backend.
    #[error("Current working-copy commit not found: {0}")]
    WorkingCopyCommitNotFound(BackendError),
    /// Editing would have targeted the root commit, which is never allowed.
    #[error("Cannot rewrite the root commit")]
    RewriteRootCommit,
}
1129
/// Error from attempts to check out a commit
#[derive(Debug, Error)]
pub enum CheckOutCommitError {
    /// The backend failed while creating the new working-copy commit.
    #[error("Failed to create new working-copy commit: {0}")]
    CreateCommit(#[from] BackendError),
    /// Switching to the new commit failed (see [`EditCommitError`]).
    #[error("Failed to edit commit: {0}")]
    EditCommit(#[from] EditCommitError),
}
1138
/// An I/O error augmented with the path that could not be accessed.
#[derive(Debug, Error)]
#[error("Cannot access {path}")]
pub struct PathError {
    /// The path on which the I/O operation failed.
    pub path: PathBuf,
    /// The underlying I/O error.
    #[source]
    pub error: io::Error,
}
1146
/// Extension trait for attaching the offending path to an I/O error,
/// converting `io::Result<T>` into `Result<T, PathError>`.
pub(crate) trait IoResultExt<T> {
    fn context(self, path: impl AsRef<Path>) -> Result<T, PathError>;
}
1150
1151impl<T> IoResultExt<T> for io::Result<T> {
1152    fn context(self, path: impl AsRef<Path>) -> Result<T, PathError> {
1153        self.map_err(|error| PathError {
1154            path: path.as_ref().to_path_buf(),
1155            error,
1156        })
1157    }
1158}
1159
mod dirty_cell {
    use std::cell::{Cell, RefCell};

    /// Cell that lazily updates the value after `mark_dirty()`.
    ///
    /// A dirty value is cleaned by running a caller-supplied closure over a
    /// mutable borrow before the value is handed out.
    #[derive(Clone, Debug)]
    pub struct DirtyCell<T> {
        value: RefCell<T>,
        dirty: Cell<bool>,
    }

    impl<T> DirtyCell<T> {
        /// Wraps `value`, treating it as already clean.
        pub fn with_clean(value: T) -> Self {
            DirtyCell {
                value: RefCell::new(value),
                dirty: Cell::new(false),
            }
        }

        /// Cleans the value with `f` if it is dirty, then returns a shared
        /// reference to it.
        pub fn get_or_ensure_clean(&self, f: impl FnOnce(&mut T)) -> &T {
            // SAFETY: get_mut/mark_dirty(&mut self) should invalidate any previously-clean
            // references leaked by this method. Clean value never changes until then.
            self.ensure_clean(f);
            unsafe { &*self.value.as_ptr() }
        }

        /// Runs `f` over a mutable borrow of the value if (and only if) it is
        /// dirty, then clears the dirty flag.
        pub fn ensure_clean(&self, f: impl FnOnce(&mut T)) {
            if self.dirty.get() {
                // This borrow_mut() ensures that there is no dirty temporary reference.
                // Panics if ensure_clean() is invoked from with_ref() callback for example.
                f(&mut self.value.borrow_mut());
                self.dirty.set(false);
            }
        }

        /// Unwraps the inner value, whether clean or dirty.
        pub fn into_inner(self) -> T {
            self.value.into_inner()
        }

        /// Runs `f` with a shared borrow of the value as-is (it is NOT
        /// cleaned first).
        pub fn with_ref<R>(&self, f: impl FnOnce(&T) -> R) -> R {
            f(&self.value.borrow())
        }

        /// Returns a mutable reference to the value as-is (it is NOT cleaned
        /// first).
        pub fn get_mut(&mut self) -> &mut T {
            self.value.get_mut()
        }

        /// Marks the value dirty so the next clean-ensuring access updates it.
        pub fn mark_dirty(&mut self) {
            *self.dirty.get_mut() = true;
        }
    }
}
1211
1212type ChangeIdIndex = IdIndex<ChangeId, IndexPosition>;
1213
/// Sorted association list from ids to values supporting prefix lookups.
/// Several entries may share the same key (see `IdIndex::from_vec`).
#[derive(Debug, Clone)]
pub struct IdIndex<K, V>(Vec<(K, V)>);
1216
1217impl<K, V> IdIndex<K, V>
1218where
1219    K: ObjectId + Ord,
1220{
1221    /// Creates new index from the given entries. Multiple values can be
1222    /// associated with a single key.
1223    pub fn from_vec(mut vec: Vec<(K, V)>) -> Self {
1224        vec.sort_unstable_by(|(k0, _), (k1, _)| k0.cmp(k1));
1225        IdIndex(vec)
1226    }
1227
1228    /// Looks up entries with the given prefix, and collects values if matched
1229    /// entries have unambiguous keys.
1230    pub fn resolve_prefix_with<U>(
1231        &self,
1232        prefix: &HexPrefix,
1233        mut value_mapper: impl FnMut(&V) -> U,
1234    ) -> PrefixResolution<Vec<U>> {
1235        let mut range = self.resolve_prefix_range(prefix).peekable();
1236        if let Some((first_key, _)) = range.peek().copied() {
1237            let maybe_entries: Option<Vec<_>> = range
1238                .map(|(k, v)| (k == first_key).then(|| value_mapper(v)))
1239                .collect();
1240            if let Some(entries) = maybe_entries {
1241                PrefixResolution::SingleMatch(entries)
1242            } else {
1243                PrefixResolution::AmbiguousMatch
1244            }
1245        } else {
1246            PrefixResolution::NoMatch
1247        }
1248    }
1249
1250    /// Iterates over entries with the given prefix.
1251    pub fn resolve_prefix_range<'a: 'b, 'b>(
1252        &'a self,
1253        prefix: &'b HexPrefix,
1254    ) -> impl Iterator<Item = (&'a K, &'a V)> + 'b {
1255        let min_bytes = prefix.min_prefix_bytes();
1256        let pos = self.0.partition_point(|(k, _)| k.as_bytes() < min_bytes);
1257        self.0[pos..]
1258            .iter()
1259            .take_while(|(k, _)| prefix.matches(k))
1260            .map(|(k, v)| (k, v))
1261    }
1262
1263    /// This function returns the shortest length of a prefix of `key` that
1264    /// disambiguates it from every other key in the index.
1265    ///
1266    /// The length to be returned is a number of hexadecimal digits.
1267    ///
1268    /// This has some properties that we do not currently make much use of:
1269    ///
1270    /// - The algorithm works even if `key` itself is not in the index.
1271    ///
1272    /// - In the special case when there are keys in the trie for which our
1273    ///   `key` is an exact prefix, returns `key.len() + 1`. Conceptually, in
1274    ///   order to disambiguate, you need every letter of the key *and* the
1275    ///   additional fact that it's the entire key). This case is extremely
1276    ///   unlikely for hashes with 12+ hexadecimal characters.
1277    pub fn shortest_unique_prefix_len(&self, key: &K) -> usize {
1278        let pos = self.0.partition_point(|(k, _)| k < key);
1279        let left = pos.checked_sub(1).map(|p| &self.0[p]);
1280        let right = self.0[pos..].iter().find(|(k, _)| k != key);
1281        itertools::chain(left, right)
1282            .map(|(neighbor, _value)| {
1283                backend::common_hex_len(key.as_bytes(), neighbor.as_bytes()) + 1
1284            })
1285            .max()
1286            .unwrap_or(0)
1287    }
1288}
1289
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_id_index_resolve_prefix() {
        // Normalizes a SingleMatch for comparison, since values of duplicated
        // keys may come back in any order.
        fn sorted(resolution: PrefixResolution<Vec<i32>>) -> PrefixResolution<Vec<i32>> {
            match resolution {
                PrefixResolution::SingleMatch(mut xs) => {
                    xs.sort(); // order of values might not be preserved by IdIndex
                    PrefixResolution::SingleMatch(xs)
                }
                _ => resolution,
            }
        }
        let id_index = IdIndex::from_vec(vec![
            (ChangeId::from_hex("0000"), 0),
            (ChangeId::from_hex("0099"), 1),
            (ChangeId::from_hex("0099"), 2),
            (ChangeId::from_hex("0aaa"), 3),
            (ChangeId::from_hex("0aab"), 4),
        ]);
        // Prefixes matching several distinct keys are ambiguous.
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("0").unwrap(), |&v| v),
            PrefixResolution::AmbiguousMatch,
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("00").unwrap(), |&v| v),
            PrefixResolution::AmbiguousMatch,
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("000").unwrap(), |&v| v),
            PrefixResolution::SingleMatch(vec![0]),
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("0001").unwrap(), |&v| v),
            PrefixResolution::NoMatch,
        );
        // A duplicated key is a single match carrying all of its values.
        assert_eq!(
            sorted(id_index.resolve_prefix_with(&HexPrefix::new("009").unwrap(), |&v| v)),
            PrefixResolution::SingleMatch(vec![1, 2]),
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("0aa").unwrap(), |&v| v),
            PrefixResolution::AmbiguousMatch,
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("0aab").unwrap(), |&v| v),
            PrefixResolution::SingleMatch(vec![4]),
        );
        assert_eq!(
            id_index.resolve_prefix_with(&HexPrefix::new("f").unwrap(), |&v| v),
            PrefixResolution::NoMatch,
        );
    }

    #[test]
    fn test_id_index_shortest_unique_prefix_len() {
        // No crash if empty
        let id_index = IdIndex::from_vec(vec![] as Vec<(ChangeId, ())>);
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("00")),
            0
        );

        let id_index = IdIndex::from_vec(vec![
            (ChangeId::from_hex("ab"), ()),
            (ChangeId::from_hex("acd0"), ()),
            (ChangeId::from_hex("acd0"), ()), // duplicated key is allowed
        ]);
        // Duplicates of the key itself don't force a longer prefix.
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("acd0")),
            2
        );
        // "ac" is an exact prefix of "acd0", so one extra digit is needed.
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("ac")),
            3
        );

        let id_index = IdIndex::from_vec(vec![
            (ChangeId::from_hex("ab"), ()),
            (ChangeId::from_hex("acd0"), ()),
            (ChangeId::from_hex("acf0"), ()),
            (ChangeId::from_hex("a0"), ()),
            (ChangeId::from_hex("ba"), ()),
        ]);

        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("a0")),
            2
        );
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("ba")),
            1
        );
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("ab")),
            2
        );
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("acd0")),
            3
        );
        // If it were there, the length would be 1.
        assert_eq!(
            id_index.shortest_unique_prefix_len(&ChangeId::from_hex("c0")),
            1
        );
    }
}