pub struct Store {
pub write_reflog: WriteReflog,
pub namespace: Option<Namespace>,
/* private fields */
}
A store for references which uses plain files.
Each reference is represented as a single file on disk, in a folder structure that follows the relative path used to identify it. For example, the branch refs/heads/main is stored in a file named refs/heads/main below the store's base directory.
Fields

write_reflog: WriteReflog
The way to handle reflog edits
namespace: Option<Namespace>
The namespace to use for edits and reads
Implementations

impl Store

pub fn git_dir(&self) -> &Path
Return the .git directory at which all references are loaded.
For worktrees, this is the linked work-tree's private ref location, while common_dir() is Some(parent_git_dir).
Examples found in repository
pub fn iter_packed<'s, 'p>(
&'s self,
packed: Option<&'p packed::Buffer>,
) -> std::io::Result<LooseThenPacked<'p, 's>> {
match self.namespace.as_ref() {
Some(namespace) => self.iter_from_info(
IterInfo::PrefixAndBase {
base: self.git_dir(),
prefix: namespace.to_path(),
},
self.common_dir().map(|base| IterInfo::PrefixAndBase {
base,
prefix: namespace.to_path(),
}),
packed,
),
None => self.iter_from_info(
IterInfo::Base { base: self.git_dir() },
self.common_dir().map(|base| IterInfo::Base { base }),
packed,
),
}
}
/// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, i.e. "refs/heads".
///
/// Please note that "refs/heads" or "refs\\heads" is equivalent to "refs/heads/"
pub fn iter_prefixed_packed<'s, 'p>(
&'s self,
prefix: impl AsRef<Path>,
packed: Option<&'p packed::Buffer>,
) -> std::io::Result<LooseThenPacked<'p, 's>> {
match self.namespace.as_ref() {
None => {
let prefix = prefix.as_ref();
let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.into())?;
let common_dir_info = self
.common_dir()
.map(|base| IterInfo::from_prefix(base, prefix.into()))
.transpose()?;
self.iter_from_info(git_dir_info, common_dir_info, packed)
}
Some(namespace) => {
let prefix = namespace.to_owned().into_namespaced_prefix(prefix);
let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.clone().into())?;
let common_dir_info = self
.common_dir()
.map(|base| IterInfo::from_prefix(base, prefix.into()))
.transpose()?;
self.iter_from_info(git_dir_info, common_dir_info, packed)
}
}
}
fn iter_from_info<'s, 'p>(
&'s self,
git_dir_info: IterInfo<'_>,
common_dir_info: Option<IterInfo<'_>>,
packed: Option<&'p packed::Buffer>,
) -> std::io::Result<LooseThenPacked<'p, 's>> {
Ok(LooseThenPacked {
git_dir: self.git_dir(),
common_dir: self.common_dir(),
iter_packed: match packed {
Some(packed) => Some(
match git_dir_info.prefix() {
Some(prefix) => packed.iter_prefixed(path_to_name(prefix).into_owned()),
None => packed.iter(),
}
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?
.peekable(),
),
None => None,
},
iter_git_dir: git_dir_info.into_iter(),
iter_common_dir: common_dir_info.map(IterInfo::into_iter),
buf: Vec::new(),
namespace: self.namespace.as_ref(),
})
}
pub fn common_dir(&self) -> Option<&Path>

If this is a linked work tree, there will be Some(git_dir) pointing to the parent repository, while git_dir() points to the location holding linked work-tree private references.
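For illustration, a minimal sketch of what both accessors yield for a store opened on a linked worktree (e.g. one created with for_linked_worktree() further down); the paths shown are hypothetical.

fn describe(store: &git_ref::file::Store) {
    // For a store opened on a linked worktree this might print, for example:
    //   git_dir:    /repo/.git/worktrees/feature   (worktree-private refs)
    //   common_dir: Some("/repo/.git")              (refs shared by all worktrees)
    println!("git_dir:    {}", store.git_dir().display());
    println!("common_dir: {:?}", store.common_dir());
}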
pub fn common_dir_resolved(&self) -> &Path

Similar to common_dir(), but it will produce either the common-dir, or the git-dir if the former isn't present.
This is also the directory in which the packed references file would be placed.
Examples found in repository
pub(crate) fn to_base_dir_and_relative_name<'a>(
&self,
name: &'a FullNameRef,
is_reflog: bool,
) -> (Cow<'_, Path>, &'a FullNameRef) {
let commondir = self.common_dir_resolved();
let linked_git_dir =
|worktree_name: &BStr| commondir.join("worktrees").join(git_path::from_bstr(worktree_name));
name.category_and_short_name()
.and_then(|(c, sn)| {
use crate::Category::*;
let sn = FullNameRef::new_unchecked(sn);
Some(match c {
LinkedPseudoRef { name: worktree_name } => is_reflog
.then(|| (linked_git_dir(worktree_name).into(), sn))
.unwrap_or((commondir.into(), name)),
Tag | LocalBranch | RemoteBranch | Note => (commondir.into(), name),
MainRef | MainPseudoRef => (commondir.into(), sn),
LinkedRef { name: worktree_name } => sn
.category()
.map_or(false, |cat| cat.is_worktree_private())
.then(|| {
if is_reflog {
(linked_git_dir(worktree_name).into(), sn)
} else {
(commondir.into(), name)
}
})
.unwrap_or((commondir.into(), sn)),
PseudoRef | Bisect | Rewritten | WorktreePrivate => return None,
})
})
.unwrap_or((self.git_dir.as_path().into(), name))
}
impl Store

pub fn reflog_exists<'a, Name, E>(&self, name: Name) -> Result<bool, E>
where
    Name: TryInto<&'a FullNameRef, Error = E>,
    Error: From<E>,
Returns true if a reflog exists for the given reference name.
Please note that this method shouldn’t be used to check if a log exists before trying to read it, but instead is meant to be the fastest possible way to determine if a log exists or not. If the caller needs to know if it’s readable, try to read the log instead with a reverse or forward iterator.
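A minimal sketch of such a fast existence check, assuming the usual &str to FullNameRef conversion; the branch name is hypothetical.

// Cheaply decide whether a log is present at all, without opening it.
fn has_branch_log(store: &git_ref::file::Store) -> bool {
    store.reflog_exists("refs/heads/main").unwrap_or(false)
}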
pub fn reflog_iter_rev<'a, 'b, Name, E>(
    &self,
    name: Name,
    buf: &'b mut [u8],
) -> Result<Option<Reverse<'b, File>>, Error>
where
    Name: TryInto<&'a FullNameRef, Error = E>,
    Error: From<E>,
Return a reflog reverse iterator for the given fully qualified name, reading chunks from the back into the fixed buffer buf.

The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
Return Ok(None) if no reflog exists.
pub fn reflog_iter<'a, 'b, Name, E>(
    &self,
    name: Name,
    buf: &'b mut Vec<u8>,
) -> Result<Option<Forward<'b>>, Error>
where
    Name: TryInto<&'a FullNameRef, Error = E>,
    Error: From<E>,
Return a reflog forward iterator for the given fully qualified name and write its file contents into buf.

The iterator will traverse log entries from oldest to newest.
Return Ok(None) if no reflog exists.
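A minimal sketch combining the forward and reverse iterators, assuming the &str to FullNameRef conversion and Debug-printable log lines; the reference name and buffer size are illustrative only.

fn print_branch_log(store: &git_ref::file::Store) -> Result<(), Box<dyn std::error::Error>> {
    // Forward: the whole log file is read into `buf`, entries arrive oldest first.
    let mut buf = Vec::new();
    if let Some(lines) = store.reflog_iter("refs/heads/main", &mut buf)? {
        for line in lines {
            println!("{:?}", line);
        }
    }
    // Reverse: the file is read in fixed-size chunks from the back, entries arrive newest first.
    let mut chunk = [0u8; 1024];
    if let Some(lines) = store.reflog_iter_rev("refs/heads/main", &mut chunk)? {
        for line in lines {
            println!("{:?}", line);
        }
    }
    Ok(())
}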
impl Store

pub fn loose_iter(&self) -> Result<LooseThenPacked<'_, '_>>
Return an iterator over all loose references, notably not including any packed ones, in lexical order. Each of the references may fail to parse and the iterator will not stop if parsing fails, allowing the caller to see all files that look like references whether valid or not.
Reference files that do not constitute valid names will be silently ignored.
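A minimal sketch of such tolerant iteration, assuming loose_iter() yields an std::io::Result like the sibling iterators on this page, and that references expose their name via name.as_bstr().

fn report_loose(store: &git_ref::file::Store) -> std::io::Result<()> {
    for reference in store.loose_iter()? {
        match reference {
            Ok(r) => println!("{}", r.name.as_bstr()),
            Err(err) => eprintln!("skipping file that looks like a ref but isn't: {}", err),
        }
    }
    Ok(())
}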
pub fn loose_iter_prefixed(
    &self,
    prefix: impl AsRef<Path>,
) -> Result<LooseThenPacked<'_, '_>>
Return an iterator over all loose references that start with the given prefix.

Otherwise it's similar to loose_iter().
impl Store

pub fn at(
    git_dir: impl Into<PathBuf>,
    write_reflog: WriteReflog,
    object_hash: Kind,
) -> Self
Create a new instance at the given git_dir, which commonly is a standard git repository with a refs/ subdirectory.

The object_hash defines which kind of hash we should recognize.
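A minimal sketch of opening the store of a regular repository; the path is hypothetical, and the WriteReflog import path and variant are assumptions about this crate version.

use git_ref::{file, store::WriteReflog};

fn open_plain_repo() -> file::Store {
    let store = file::Store::at("/repo/.git", WriteReflog::Normal, git_hash::Kind::Sha1);
    assert_eq!(store.git_dir(), std::path::Path::new("/repo/.git"));
    assert_eq!(store.common_dir(), None); // not a linked worktree
    store
}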
Examples found in repository
pub fn at(
git_dir: impl Into<PathBuf>,
reflog_mode: WriteReflog,
object_hash: git_hash::Kind,
) -> Result<Self, Error> {
// for now, just try to read the directory - later we will do that naturally as we have to figure out if it's a ref-table or not.
let git_dir = git_dir.into();
std::fs::read_dir(&git_dir)?;
Ok(crate::Store {
inner: crate::store::State::Loose {
store: file::Store::at(git_dir, reflog_mode, object_hash),
},
})
}
pub fn for_linked_worktree(
    git_dir: impl Into<PathBuf>,
    common_dir: impl Into<PathBuf>,
    write_reflog: WriteReflog,
    object_hash: Kind,
) -> Self
Like at(), but for linked work-trees which use git_dir as private ref store and common_dir for shared references.
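A minimal sketch for a linked worktree named "feature"; the paths are hypothetical and the WriteReflog import is assumed as above.

use git_ref::{file, store::WriteReflog};

fn open_worktree_store() -> file::Store {
    let store = file::Store::for_linked_worktree(
        "/repo/.git/worktrees/feature", // private refs of this worktree
        "/repo/.git",                   // refs shared by all worktrees
        WriteReflog::Normal,
        git_hash::Kind::Sha1,
    );
    assert_eq!(store.common_dir_resolved(), std::path::Path::new("/repo/.git"));
    store
}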
impl Store

pub fn iter_packed<'s, 'p>(
    &'s self,
    packed: Option<&'p Buffer>,
) -> Result<LooseThenPacked<'p, 's>>
Return an iterator over all references, loose or packed, sorted by their name.

Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
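A minimal sketch listing every reference by name. It opens the packed buffer explicitly, since passing None would simply skip packed refs; the name.as_bstr() accessor is assumed.

fn list_all(store: &git_ref::file::Store) -> Result<(), Box<dyn std::error::Error>> {
    let packed = store.open_packed_buffer()?;
    for reference in store.iter_packed(packed.as_ref())? {
        println!("{}", reference?.name.as_bstr());
    }
    Ok(())
}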
pub fn iter_prefixed_packed<'s, 'p>(
    &'s self,
    prefix: impl AsRef<Path>,
    packed: Option<&'p Buffer>,
) -> Result<LooseThenPacked<'p, 's>>
As iter(…), but filters by prefix, i.e. "refs/heads".

Please note that "refs/heads" or "refs\heads" is equivalent to "refs/heads/".
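The same sketch as above, restricted to local branches via the "refs/heads" prefix.

fn list_branches(store: &git_ref::file::Store) -> Result<(), Box<dyn std::error::Error>> {
    let packed = store.open_packed_buffer()?;
    for reference in store.iter_prefixed_packed("refs/heads", packed.as_ref())? {
        println!("{}", reference?.name.as_bstr());
    }
    Ok(())
}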
impl Store

pub fn find<'a, Name, E>(&self, partial: Name) -> Result<Reference, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Similar to file::Store::try_find(), but a non-existing ref is treated as an error.
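A minimal sketch, assuming the usual &str to PartialNameRef conversion; "main" resolves to "refs/heads/main" if such a branch exists.

fn main_target(store: &git_ref::file::Store) -> Result<git_ref::Target, git_ref::file::find::existing::Error> {
    store.find("main").map(|reference| reference.target)
}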
pub fn find_packed<'a, Name, E>(
    &self,
    partial: Name,
    packed: Option<&Buffer>,
) -> Result<Reference, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Similar to file::Store::find(), but supports a stable packed buffer.
pub fn find_loose<'a, Name, E>(&self, partial: Name) -> Result<Reference, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Similar to file::Store::find(), but won't handle packed-refs.
impl Store

pub fn try_find<'a, Name, E>(
    &self,
    partial: Name,
) -> Result<Option<Reference>, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Find a single reference by the given path which is required to be a valid reference name.

Returns Ok(None) if no such ref exists.

Note

- The lookup algorithm follows the one in the git documentation.
- The packed buffer is checked for modifications each time the method is called. See file::Store::try_find_packed() for a version with more control.
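A minimal sketch separating the three possible outcomes; the field and accessor names (name, as_bstr(), target) are assumed as in the other examples on this page.

fn describe_ref(store: &git_ref::file::Store, name: &str) {
    match store.try_find(name) {
        Ok(Some(reference)) => println!("{} -> {:?}", reference.name.as_bstr(), reference.target),
        Ok(None) => println!("{} does not exist", name),
        Err(err) => eprintln!("lookup failed: {}", err),
    }
}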
pub fn try_find_loose<'a, Name, E>(
    &self,
    partial: Name,
) -> Result<Option<Reference>, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Similar to file::Store::try_find(), but finds only loose references, that is references that aren't in the packed-refs buffer.

All symbolic references are loose references.
HEAD is always a loose reference.
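A minimal sketch that reads HEAD straight from its loose file; the target field on the returned reference is assumed as elsewhere on this page.

fn print_head(store: &git_ref::file::Store) {
    if let Ok(Some(head)) = store.try_find_loose("HEAD") {
        println!("HEAD -> {:?}", head.target);
    }
}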
pub fn try_find_packed<'a, Name, E>(
    &self,
    partial: Name,
    packed: Option<&Buffer>,
) -> Result<Option<Reference>, Error>
where
    Name: TryInto<&'a PartialNameRef, Error = E>,
    Error: From<E>,
Similar to file::Store::find(), but allows passing a snapshotted packed buffer instead.
Examples found in repository
fn follow_packed(
&self,
store: &file::Store,
packed: Option<&packed::Buffer>,
) -> Option<Result<Reference, file::find::existing::Error>> {
match self.peeled {
Some(peeled) => Some(Ok(Reference {
name: self.name.clone(),
target: Target::Peeled(peeled),
peeled: None,
})),
None => match &self.target {
Target::Peeled(_) => None,
Target::Symbolic(full_name) => match store.try_find_packed(full_name.as_ref(), packed) {
Ok(Some(next)) => Some(Ok(next)),
Ok(None) => Some(Err(file::find::existing::Error::NotFound {
name: full_name.to_path().to_owned(),
})),
Err(err) => Some(Err(file::find::existing::Error::Find(err))),
},
},
}
}
impl Store

Edits

pub fn transaction(&self) -> Transaction<'_, '_>
Open a transaction with the given edits, and determine how to fail if a lock cannot be obtained.

A snapshot of packed references will be obtained automatically if needed to fulfill this transaction and will be provided as result of a successful transaction. Note that upon transaction failure, packed-refs will never have been altered.

The transaction inherits the parent namespace.
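A minimal sketch of a single-ref edit, assuming the RefEdit, Change, LogChange, PreviousValue and RefLog types from this crate's transaction module and a &str to FullName conversion; the branch name, message and lock modes are hypothetical, and the final commit call is only indicated.

use git_ref::{
    file::Store,
    transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
    Target,
};

fn point_branch_at(store: &Store, new_id: git_hash::ObjectId) -> Result<(), Box<dyn std::error::Error>> {
    let edit = RefEdit {
        name: "refs/heads/feature".try_into()?,
        change: Change::Update {
            log: LogChange {
                mode: RefLog::AndReference,
                force_create_reflog: false,
                message: "move feature branch".into(),
            },
            expected: PreviousValue::Any, // don't care what the branch pointed to before
            new: Target::Peeled(new_id),
        },
        deref: false,
    };
    let _prepared = store.transaction().prepare(
        Some(edit),
        git_lock::acquire::Fail::Immediately, // lock mode for loose ref files
        git_lock::acquire::Fail::Immediately, // lock mode for packed-refs
    )?;
    // Calling commit(…) on the prepared transaction with a committer signature
    // would apply the edit and write the reflog entry.
    Ok(())
}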
impl Store

pub fn open_packed_buffer(&self) -> Result<Option<Buffer>, Error>
Try to open a new packed buffer. It's not an error if it doesn't exist, but yields Ok(None).
Examples found in repository
pub(crate) fn force_refresh_packed_buffer(&self) -> Result<(), packed::buffer::open::Error> {
self.packed.force_refresh(|| {
let modified = self.packed_refs_path().metadata()?.modified()?;
self.open_packed_buffer().map(|packed| Some(modified).zip(packed))
})
}
pub(crate) fn assure_packed_refs_uptodate(
&self,
) -> Result<Option<super::SharedBufferSnapshot>, packed::buffer::open::Error> {
self.packed.recent_snapshot(
|| self.packed_refs_path().metadata().and_then(|m| m.modified()).ok(),
|| self.open_packed_buffer(),
)
}
pub fn cached_packed_buffer(
    &self,
) -> Result<Option<SharedBufferSnapshot>, Error>
Return a possibly cached packed buffer with shared ownership. At retrieval it will assure it’s up to date, but after that it can be considered a snapshot as it cannot change anymore.
Use this to make successive calls to file::Store::try_find_packed()
or obtain iterators using file::Store::iter_packed()
in a way that assures the packed-refs content won’t change.
pub fn packed_refs_path(&self) -> PathBuf
Return the path at which packed-refs would usually be stored
Examples found in repository
pub(crate) fn packed_transaction(
&self,
lock_mode: git_lock::acquire::Fail,
) -> Result<packed::Transaction, transaction::Error> {
let lock = git_lock::File::acquire_to_update_resource(self.packed_refs_path(), lock_mode, None)?;
// We 'steal' the possibly existing packed buffer which may save time if it's already there and fresh.
// If nothing else is happening, nobody will get to see the soon stale buffer either, but if so, they will pay
// for reloading it. That seems preferred over always loading up a new one.
Ok(packed::Transaction::new_from_pack_and_lock(
self.assure_packed_refs_uptodate()?,
lock,
))
}
/// Try to open a new packed buffer. It's not an error if it doesn't exist, but yields `Ok(None)`.
pub fn open_packed_buffer(&self) -> Result<Option<packed::Buffer>, packed::buffer::open::Error> {
let need_more_than_this_many_bytes_to_use_mmap = 32 * 1024;
match packed::Buffer::open(self.packed_refs_path(), need_more_than_this_many_bytes_to_use_mmap) {
Ok(buf) => Ok(Some(buf)),
Err(packed::buffer::open::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
}
/// Return a possibly cached packed buffer with shared ownership. At retrieval it will assure it's up to date, but
/// after that it can be considered a snapshot as it cannot change anymore.
///
/// Use this to make successive calls to [`file::Store::try_find_packed()`]
/// or obtain iterators using [`file::Store::iter_packed()`] in a way that assures the packed-refs content won't change.
pub fn cached_packed_buffer(
&self,
) -> Result<Option<file::packed::SharedBufferSnapshot>, packed::buffer::open::Error> {
self.assure_packed_refs_uptodate()
}
/// Return the path at which packed-refs would usually be stored
pub fn packed_refs_path(&self) -> PathBuf {
self.common_dir_resolved().join("packed-refs")
}
pub(crate) fn packed_refs_lock_path(&self) -> PathBuf {
let mut p = self.packed_refs_path();
p.set_extension("lock");
p
}
}
///
pub mod transaction {
use crate::store_impl::packed;
/// The error returned by [`file::Transaction::prepare()`][crate::file::Transaction::prepare()].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
#[error("An existing pack couldn't be opened or read when preparing a transaction")]
BufferOpen(#[from] packed::buffer::open::Error),
#[error("The lock for a packed transaction could not be obtained")]
TransactionLock(#[from] git_lock::acquire::Error),
}
}
/// An up-to-date snapshot of the packed refs buffer.
pub type SharedBufferSnapshot = git_features::fs::SharedSnapshot<packed::Buffer>;
pub(crate) mod modifiable {
use git_features::threading::OwnShared;
use crate::{file, packed};
pub(crate) type MutableSharedBuffer = OwnShared<git_features::fs::MutableSnapshot<packed::Buffer>>;
impl file::Store {
pub(crate) fn force_refresh_packed_buffer(&self) -> Result<(), packed::buffer::open::Error> {
self.packed.force_refresh(|| {
let modified = self.packed_refs_path().metadata()?.modified()?;
self.open_packed_buffer().map(|packed| Some(modified).zip(packed))
})
}
pub(crate) fn assure_packed_refs_uptodate(
&self,
) -> Result<Option<super::SharedBufferSnapshot>, packed::buffer::open::Error> {
self.packed.recent_snapshot(
|| self.packed_refs_path().metadata().and_then(|m| m.modified()).ok(),
|| self.open_packed_buffer(),
)
}
More examples
pub fn prepare(
mut self,
edits: impl IntoIterator<Item = RefEdit>,
ref_files_lock_fail_mode: git_lock::acquire::Fail,
packed_refs_lock_fail_mode: git_lock::acquire::Fail,
) -> Result<Self, Error> {
assert!(self.updates.is_none(), "BUG: Must not call prepare(…) multiple times");
let store = self.store;
let mut updates: Vec<_> = edits
.into_iter()
.map(|update| Edit {
update,
lock: None,
parent_index: None,
leaf_referent_previous_oid: None,
})
.collect();
updates
.pre_process(
|name| {
let symbolic_refs_are_never_packed = None;
store
.find_existing_inner(name, symbolic_refs_are_never_packed)
.map(|r| r.target)
.ok()
},
|idx, update| Edit {
update,
lock: None,
parent_index: Some(idx),
leaf_referent_previous_oid: None,
},
)
.map_err(Error::PreprocessingFailed)?;
let mut maybe_updates_for_packed_refs = match self.packed_refs {
PackedRefs::DeletionsAndNonSymbolicUpdates(_)
| PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_) => Some(0_usize),
PackedRefs::DeletionsOnly => None,
};
if maybe_updates_for_packed_refs.is_some()
|| self.store.packed_refs_path().is_file()
|| self.store.packed_refs_lock_path().is_file()
{
let mut edits_for_packed_transaction = Vec::<RefEdit>::new();
let mut needs_packed_refs_lookups = false;
for edit in updates.iter() {
let log_mode = match edit.update.change {
Change::Update {
log: LogChange { mode, .. },
..
} => mode,
Change::Delete { log, .. } => log,
};
if log_mode == RefLog::Only {
continue;
}
let name = match possibly_adjust_name_for_prefixes(edit.update.name.as_ref()) {
Some(n) => n,
None => continue,
};
if let Some(ref mut num_updates) = maybe_updates_for_packed_refs {
if let Change::Update {
new: Target::Peeled(_), ..
} = edit.update.change
{
edits_for_packed_transaction.push(RefEdit {
name,
..edit.update.clone()
});
*num_updates += 1;
}
continue;
}
match edit.update.change {
Change::Update {
expected: PreviousValue::ExistingMustMatch(_) | PreviousValue::MustExistAndMatch(_),
..
} => needs_packed_refs_lookups = true,
Change::Delete { .. } => {
edits_for_packed_transaction.push(RefEdit {
name,
..edit.update.clone()
});
}
_ => {
needs_packed_refs_lookups = true;
}
}
}
if !edits_for_packed_transaction.is_empty() || needs_packed_refs_lookups {
// What follows means that we will only create a transaction if we have to access packed refs for looking
// up current ref values, or that we definitely have a transaction if we need to make updates. Otherwise
// we may have no transaction at all which isn't required if we had none and would only try making deletions.
let packed_transaction: Option<_> =
if maybe_updates_for_packed_refs.unwrap_or(0) > 0 || self.store.packed_refs_lock_path().is_file() {
// We have to create a packed-ref even if it doesn't exist
self.store
.packed_transaction(packed_refs_lock_fail_mode)
.map_err(|err| match err {
file::packed::transaction::Error::BufferOpen(err) => Error::from(err),
file::packed::transaction::Error::TransactionLock(err) => {
Error::PackedTransactionAcquire(err)
}
})?
.into()
} else {
// A packed transaction is optional - we only have deletions that can't be made if
// no packed-ref file exists anyway
self.store
.assure_packed_refs_uptodate()?
.map(|p| {
buffer_into_transaction(p, packed_refs_lock_fail_mode)
.map_err(Error::PackedTransactionAcquire)
})
.transpose()?
};
if let Some(transaction) = packed_transaction {
self.packed_transaction = Some(match &mut self.packed_refs {
PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(f)
| PackedRefs::DeletionsAndNonSymbolicUpdates(f) => {
transaction.prepare(edits_for_packed_transaction, f)?
}
PackedRefs::DeletionsOnly => transaction
.prepare(edits_for_packed_transaction, &mut |_, _| {
unreachable!("BUG: deletions never trigger object lookups")
})?,
});
}
}
}
for cid in 0..updates.len() {
let change = &mut updates[cid];
if let Err(err) = Self::lock_ref_and_apply_change(
self.store,
ref_files_lock_fail_mode,
self.packed_transaction.as_ref().and_then(|t| t.buffer()),
change,
self.packed_transaction.is_some(),
matches!(
self.packed_refs,
PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
),
) {
let err = match err {
Error::LockAcquire {
source,
full_name: _bogus,
} => Error::LockAcquire {
source,
full_name: {
let mut cursor = change.parent_index;
let mut ref_name = change.name();
while let Some(parent_idx) = cursor {
let parent = &updates[parent_idx];
if parent.parent_index.is_none() {
ref_name = parent.name();
} else {
cursor = parent.parent_index;
}
}
ref_name
},
},
other => other,
};
return Err(err);
};
// traverse parent chain from leaf/peeled ref and set the leaf previous oid accordingly
// to help with their reflog entries
if let (Some(crate::TargetRef::Peeled(oid)), Some(parent_idx)) =
(change.update.change.previous_value(), change.parent_index)
{
let oid = oid.to_owned();
let mut parent_idx_cursor = Some(parent_idx);
while let Some(parent) = parent_idx_cursor.take().map(|idx| &mut updates[idx]) {
parent_idx_cursor = parent.parent_index;
parent.leaf_referent_previous_oid = Some(oid);
}
}
}
self.updates = Some(updates);
Ok(self)
}