Struct git_repository::Reference
pub struct Reference<'r> {
    pub inner: Reference,
    /* private fields */
}
A reference that points to an object or reference, with access to its source repository.
Note that these are snapshots and won’t recognize if they are stale.
Fields
inner: Reference
The actual reference data.
Implementations
impl<'repo> Reference<'repo>
Remotes
pub fn remote_name(&self, direction: Direction) -> Option<Name<'repo>>
Find the unvalidated name of our remote for direction as configured in branch.<name>.remote or branch.<name>.pushRemote, respectively.
If Some(<name>) is returned, it can be used in Repository::find_remote(…); if None is returned, Repository::remote_default_name() could be used in its place.
Returns None if no remote is configured.
Note
- It's recommended to use the remote(…) method instead, as it will configure the remote with additional information.
- branch.<name>.pushRemote falls back to branch.<name>.remote.
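A minimal usage sketch, assuming the conventional use git_repository as git alias, a repository whose HEAD points at a born branch, and a remote configured for that branch; error handling is reduced to Option:

use git_repository as git;

fn fetch_remote_identifier(repo: &git::Repository) -> Option<String> {
    // The reference HEAD points to, if HEAD is born.
    let branch = repo.head_ref().ok()??;
    match branch.remote_name(git::remote::Direction::Fetch)? {
        // A symbolic name such as "origin" can be passed to Repository::find_remote(…).
        git::remote::Name::Symbol(name) => Some(name.to_string()),
        // A URL configured in place of a remote name has no name to look up.
        git::remote::Name::Url(url) => Some(url.to_string()),
    }
}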
Examples found in repository
pub fn remote(
&self,
direction: remote::Direction,
) -> Option<Result<crate::Remote<'repo>, remote::find::existing::Error>> {
// TODO: use `branch.<name>.merge`
self.remote_name(direction).map(|name| match name {
remote::Name::Symbol(name) => self.repo.find_remote(name.as_ref()).map_err(Into::into),
remote::Name::Url(url) => git_url::parse(url.as_ref()).map_err(Into::into).and_then(|url| {
self.repo
.remote_at(url)
.map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err)))
}),
})
}
pub fn remote(
    &self,
    direction: Direction
) -> Option<Result<Remote<'repo>, Error>>
Like remote_name(…), but configures the returned Remote with additional information like branch.<name>.merge, so it is known which branch on the remote side corresponds to this one for merging when pulling.
It also handles the case where the remote is a configured URL, which has no name.
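A hedged sketch of obtaining the configured remote of the current branch; the git alias and the presence of a checked-out branch are assumptions, and errors are collapsed into None:

use git_repository as git;

fn configured_remote_for_head(repo: &git::Repository) -> Option<git::Remote<'_>> {
    let branch = repo.head_ref().ok()??;
    // None means no remote is configured for this branch in the given direction;
    // the inner Result carries errors from looking up or instantiating the remote.
    branch.remote(git::remote::Direction::Fetch)?.ok()
}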
impl<'repo> Reference<'repo>
pub fn log_iter(&self) -> Platform<'_, '_>
Return a platform for obtaining iterators over reference logs.
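A small sketch of walking the reflog newest-first, using the rev() accessor and the line fields (previous_oid, new_oid, message) as they appear in the example below; the git alias is an assumption and errors are ignored:

use git_repository as git;

fn print_recent_reflog_entries(reference: &git::Reference<'_>) -> Option<()> {
    let mut platform = reference.log_iter();
    // rev() yields entries newest-first, or None if the reference has no reflog.
    let entries = platform.rev().ok()??;
    for line in entries.filter_map(Result::ok).take(5) {
        println!("{} -> {}: {}", line.previous_oid, line.new_oid, line.message);
    }
    Some(())
}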
Examples found in repository
fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
self.unset_disambiguate_call();
match query {
ReflogLookup::Date(_date) => {
self.err.push(Error::Planned {
dependency: "remote handling and ref-specs are fleshed out more",
});
None
}
ReflogLookup::Entry(no) => {
let r = match &mut self.refs[self.idx] {
Some(r) => r.clone().attach(self.repo),
val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
Ok(Some(r)) => {
*val = Some(r.clone().detach());
r
}
Ok(None) => {
self.err.push(Error::UnbornHeadsHaveNoRefLog);
return None;
}
Err(err) => {
self.err.push(err.into());
return None;
}
},
};
let mut platform = r.log_iter();
match platform.rev().ok().flatten() {
Some(mut it) => match it.nth(no).and_then(Result::ok) {
Some(line) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(line.new_oid);
Some(())
}
None => {
let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
self.err.push(Error::RefLogEntryOutOfRange {
reference: r.detach(),
desired: no,
available,
});
None
}
},
None => {
self.err.push(Error::MissingRefLog {
reference: r.name().as_bstr().into(),
action: "lookup entry",
});
None
}
}
}
}
}
impl<'repo> Reference<'repo>
pub fn set_target_id(
    &mut self,
    id: impl Into<ObjectId>,
    reflog_message: impl Into<BString>
) -> Result<(), Error>
Set the id of this direct reference to id and use reflog_message for the reflog (if enabled in the repository).
Note that the operation will fail on symbolic references (to change their type, use the lower-level reference database) or if the reference was deleted or changed in the meantime. Furthermore, refrain from using this method for more than a one-off change, as it creates a transaction for each invocation. If multiple references should be changed, use Repository::edit_references() or the lower-level reference database instead.
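A one-off update as a sketch; the reference name, the new id and the reflog message are purely illustrative, and both error types are boxed for brevity:

use git_repository as git;

fn point_branch_at(
    repo: &git::Repository,
    new_id: git::ObjectId,
) -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical branch name; this fails if the reference is symbolic.
    let mut reference = repo.find_reference("refs/heads/example")?;
    reference.set_target_id(new_id, "example: move branch")?;
    Ok(())
}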
impl<'repo> Reference<'repo>
Access
pub fn try_id(&self) -> Option<Id<'repo>>
Returns the attached id we point to, or None if this is a symbolic ref.
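For illustration, a tiny helper that extracts a detached ObjectId only from direct references (the git alias is an assumption):

use git_repository as git;

fn direct_target(reference: &git::Reference<'_>) -> Option<git::ObjectId> {
    // None for symbolic references, e.g. a typical HEAD pointing at a branch.
    reference.try_id().map(|id| id.detach())
}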
Examples found in repository
fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
let platform = repo.references()?;
Ok(match self {
SelectRef::AllTags | SelectRef::AllRefs => {
let mut refs: Vec<_> = match self {
SelectRef::AllRefs => platform.all()?,
SelectRef::AllTags => platform.tags()?,
_ => unreachable!(),
}
.filter_map(Result::ok)
.filter_map(|mut r: crate::Reference<'_>| {
let target_id = r.target().try_id().map(ToOwned::to_owned);
let peeled_id = r.peel_to_id_in_place().ok()?;
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
(1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
}
_ => (0, 0),
};
(
peeled_id.inner,
prio,
tag_time,
Cow::from(r.inner.name.shorten().to_owned()),
)
.into()
})
.collect();
// By priority, then by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
refs.sort_by(
|(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
a_prio
.cmp(b_prio)
.then_with(|| a_time.cmp(b_time))
.then_with(|| b_name.cmp(a_name))
},
);
refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
}
SelectRef::AnnotatedTags => {
let mut peeled_commits_and_tag_date: Vec<_> = platform
.tags()?
.filter_map(Result::ok)
.filter_map(|r: crate::Reference<'_>| {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
let tag_time = tag
.tagger()
.ok()
.and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
.unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
.collect();
// Sort by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
});
peeled_commits_and_tag_date
.into_iter()
.map(|(a, _, c)| (a, c))
.collect()
}
})
}
pub fn id(&self) -> Id<'repo>
Returns the attached id we point to, or panics if this is a symbolic ref.
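As a sketch, id() is best reserved for references that are known to be direct, for instance those produced by the peeled reference iterators used in the examples; otherwise prefer try_id():

use git_repository as git;

fn direct_target_or_panic(reference: &git::Reference<'_>) -> git::ObjectId {
    // Panics on symbolic references, so only call this when directness is guaranteed.
    reference.id().detach()
}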
Examples found in repository
fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
self.unset_disambiguate_call();
self.follow_refs_to_objects_if_needed()?;
#[cfg(not(feature = "regex"))]
let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
#[cfg(feature = "regex")]
let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
Ok(compiled) => {
let needs_regex = regex::escape(compiled.as_str()) != regex;
move |message: &BStr| -> bool {
if needs_regex {
compiled.is_match(message) ^ negated
} else {
message.contains_str(regex) ^ negated
}
}
}
Err(err) => {
self.err.push(err.into());
return None;
}
};
match self.objs[self.idx].as_mut() {
Some(objs) => {
let repo = self.repo;
let mut errors = Vec::new();
let mut replacements = Replacements::default();
for oid in objs.iter() {
match oid
.attach(repo)
.ancestors()
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
replacements.push((*oid, commit.id));
matched = true;
break;
}
}
Err(err) => errors.push((*oid, err)),
}
}
if !matched {
errors.push((
*oid,
Error::NoRegexMatch {
regex: regex.into(),
commits_searched: count,
oid: oid.attach(repo).shorten_or_id(),
},
))
}
}
Err(err) => errors.push((*oid, err.into())),
}
}
handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
}
None => match self.repo.references() {
Ok(references) => match references.all() {
Ok(references) => {
match self
.repo
.rev_walk(
references
.peeled()
.filter_map(Result::ok)
.filter(|r| {
r.id()
.object()
.ok()
.map(|obj| obj.kind == git_object::Kind::Commit)
.unwrap_or(false)
})
.filter_map(|r| r.detach().peeled),
)
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(commit.id);
matched = true;
break;
}
}
Err(err) => self.err.push(err),
}
}
if matched {
Some(())
} else {
self.err.push(Error::NoRegexMatchAllRefs {
regex: regex.into(),
commits_searched: count,
});
None
}
}
Err(err) => {
self.err.push(err.into());
None
}
}
}
Err(err) => {
self.err.push(err.into());
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
},
}
}
pub fn target(&self) -> TargetRef<'_>
Return the target to which this reference points.
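A sketch that inspects both target kinds; the import path for TargetRef is an assumption based on the refs re-export, and the git alias is assumed:

use git_repository as git;

fn describe_target(reference: &git::Reference<'_>) -> String {
    use git::refs::TargetRef;
    match reference.target() {
        // Direct references carry an object id.
        TargetRef::Peeled(oid) => format!("direct: {}", oid),
        // Symbolic references point at another reference by full name.
        TargetRef::Symbolic(name) => format!("symbolic: {}", name.as_bstr()),
    }
}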
Examples found in repository
pub(crate) fn one_round(
algo: Algorithm,
round: usize,
repo: &crate::Repository,
ref_map: &crate::remote::fetch::RefMap,
fetch_tags: crate::remote::fetch::Tags,
arguments: &mut git_protocol::fetch::Arguments,
_previous_response: Option<&git_protocol::fetch::Response>,
) -> Result<bool, Error> {
let tag_refspec_to_ignore = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
match algo {
Algorithm::Naive => {
assert_eq!(round, 1, "Naive always finishes after the first round, and claims.");
let mut has_missing_tracking_branch = false;
for mapping in &ref_map.mappings {
if tag_refspec_to_ignore.map_or(false, |tag_spec| {
mapping
.spec_index
.implicit_index()
.and_then(|idx| ref_map.extra_refspecs.get(idx))
.map_or(false, |spec| spec.to_ref() == tag_spec)
}) {
continue;
}
let have_id = mapping.local.as_ref().and_then(|name| {
repo.find_reference(name)
.ok()
.and_then(|r| r.target().try_id().map(ToOwned::to_owned))
});
match have_id {
Some(have_id) => {
if let Some(want_id) = mapping.remote.as_id() {
if want_id != have_id {
arguments.want(want_id);
arguments.have(have_id);
}
}
}
None => {
if let Some(want_id) = mapping.remote.as_id() {
arguments.want(want_id);
has_missing_tracking_branch = true;
}
}
}
}
if has_missing_tracking_branch {
if let Ok(Some(r)) = repo.head_ref() {
if let Some(id) = r.target().try_id() {
arguments.have(id);
}
}
}
Ok(true)
}
}
}
More examples
fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
let platform = repo.references()?;
Ok(match self {
SelectRef::AllTags | SelectRef::AllRefs => {
let mut refs: Vec<_> = match self {
SelectRef::AllRefs => platform.all()?,
SelectRef::AllTags => platform.tags()?,
_ => unreachable!(),
}
.filter_map(Result::ok)
.filter_map(|mut r: crate::Reference<'_>| {
let target_id = r.target().try_id().map(ToOwned::to_owned);
let peeled_id = r.peel_to_id_in_place().ok()?;
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
(1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
}
_ => (0, 0),
};
(
peeled_id.inner,
prio,
tag_time,
Cow::from(r.inner.name.shorten().to_owned()),
)
.into()
})
.collect();
// By priority, then by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
refs.sort_by(
|(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
a_prio
.cmp(b_prio)
.then_with(|| a_time.cmp(b_time))
.then_with(|| b_name.cmp(a_name))
},
);
refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
}
SelectRef::AnnotatedTags => {
let mut peeled_commits_and_tag_date: Vec<_> = platform
.tags()?
.filter_map(Result::ok)
.filter_map(|r: crate::Reference<'_>| {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
let tag_time = tag
.tagger()
.ok()
.and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
.unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
.collect();
// Sort by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
});
peeled_commits_and_tag_date
.into_iter()
.map(|(a, _, c)| (a, c))
.collect()
}
})
}
pub(crate) fn update(
repo: &Repository,
message: RefLogMessage,
mappings: &[fetch::Mapping],
refspecs: &[git_refspec::RefSpec],
extra_refspecs: &[git_refspec::RefSpec],
fetch_tags: fetch::Tags,
dry_run: fetch::DryRun,
write_packed_refs: fetch::WritePackedRefs,
) -> Result<update::Outcome, update::Error> {
let mut edits = Vec::new();
let mut updates = Vec::new();
let implicit_tag_refspec = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
|fetch::Mapping {
remote,
local,
spec_index,
}| {
spec_index.get(refspecs, extra_refspecs).map(|spec| {
(
remote,
local,
spec,
implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
)
})
},
) {
let remote_id = match remote.as_id() {
Some(id) => id,
None => continue,
};
if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
let update = if is_implicit_tag {
update::Mode::ImplicitTagNotSentByRemote.into()
} else {
update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
};
updates.push(update);
continue;
}
let checked_out_branches = worktree_branches(repo)?;
let (mode, edit_index) = match local {
Some(name) => {
let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
Some(existing) => {
if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
let mode = update::Mode::RejectedCurrentlyCheckedOut {
worktree_dir: wt_dir.to_owned(),
};
updates.push(mode.into());
continue;
}
match existing.target() {
TargetRef::Symbolic(_) => {
updates.push(update::Mode::RejectedSymbolic.into());
continue;
}
TargetRef::Peeled(local_id) => {
let previous_value =
PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
let (mode, reflog_message) = if local_id == remote_id {
(update::Mode::NoChangeNeeded, "no update will be performed")
} else if let Some(git_ref::Category::Tag) = existing.name().category() {
if spec.allow_non_fast_forward() {
(update::Mode::Forced, "updating tag")
} else {
updates.push(update::Mode::RejectedTagUpdate.into());
continue;
}
} else {
let mut force = spec.allow_non_fast_forward();
let is_fast_forward = match dry_run {
fetch::DryRun::No => {
let ancestors = repo
.find_object(local_id)?
.try_into_commit()
.map_err(|_| ())
.and_then(|c| {
c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
}).and_then(|local_commit_time|
remote_id
.to_owned()
.ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
.sorting(
git_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
time_in_seconds_since_epoch: local_commit_time
},
)
.map_err(|_| ())
);
match ancestors {
Ok(mut ancestors) => {
ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
}
Err(_) => {
force = true;
false
}
}
}
fetch::DryRun::Yes => true,
};
if is_fast_forward {
(
update::Mode::FastForward,
matches!(dry_run, fetch::DryRun::Yes)
.then(|| "fast-forward (guessed in dry-run)")
.unwrap_or("fast-forward"),
)
} else if force {
(update::Mode::Forced, "forced-update")
} else {
updates.push(update::Mode::RejectedNonFastForward.into());
continue;
}
};
(mode, reflog_message, existing.name().to_owned(), previous_value)
}
}
}
None => {
let name: git_ref::FullName = name.try_into()?;
let reflog_msg = match name.category() {
Some(git_ref::Category::Tag) => "storing tag",
Some(git_ref::Category::LocalBranch) => "storing head",
_ => "storing ref",
};
(
update::Mode::New,
reflog_msg,
name,
PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
)
}
};
let edit = RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: message.compose(reflog_message),
},
expected: previous_value,
new: if let Source::Ref(git_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
match mappings.iter().find_map(|m| {
m.remote.as_name().and_then(|name| {
(name == target)
.then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
.flatten()
})
}) {
Some(local_branch) => {
// This is always safe because…
// - the reference may exist already
// - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
// - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
// target reference still exists and we can point to it.
Target::Symbolic(local_branch)
}
None => Target::Peeled(remote_id.into()),
}
} else {
Target::Peeled(remote_id.into())
},
},
name,
deref: false,
};
let edit_index = edits.len();
edits.push(edit);
(mode, Some(edit_index))
}
None => (update::Mode::NoChangeNeeded, None),
};
updates.push(Update { mode, edit_index })
}
let edits = match dry_run {
fetch::DryRun::No => {
let (file_lock_fail, packed_refs_lock_fail) = repo
.config
.lock_timeout()
.map_err(crate::reference::edit::Error::from)?;
repo.refs
.transaction()
.packed_refs(
match write_packed_refs {
fetch::WritePackedRefs::Only => {
git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}))},
fetch::WritePackedRefs::Never => git_ref::file::transaction::PackedRefs::DeletionsOnly
}
)
.prepare(edits, file_lock_fail, packed_refs_lock_fail)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?
}
fetch::DryRun::Yes => edits,
};
Ok(update::Outcome { edits, updates })
}
pub fn name(&self) -> &FullNameRef
Return the reference’s full name.
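A small sketch using the shorten() accessor seen in the examples below to turn a full name like refs/heads/main into main (the git alias is an assumption):

use git_repository as git;

fn short_name(reference: &git::Reference<'_>) -> String {
    reference.name().shorten().to_string()
}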
Examples found in repository
pub fn remote_name(&self, direction: remote::Direction) -> Option<remote::Name<'repo>> {
let name = self.name().shorten();
let config = &self.repo.config.resolved;
(direction == remote::Direction::Push)
.then(|| {
config
.string("branch", Some(name), "pushRemote")
.or_else(|| config.string("remote", None, "pushDefault"))
})
.flatten()
.or_else(|| config.string("branch", Some(name), "remote"))
.and_then(|name| name.try_into().ok())
}
More examples
pub fn set_target_id(
&mut self,
id: impl Into<git_hash::ObjectId>,
reflog_message: impl Into<BString>,
) -> Result<(), Error> {
match &self.inner.target {
Target::Symbolic(name) => return Err(Error::SymbolicReference { name: name.clone() }),
Target::Peeled(current_id) => {
let changed = self.repo.reference(
self.name(),
id,
PreviousValue::MustExistAndMatch(Target::Peeled(current_id.to_owned())),
reflog_message,
)?;
*self = changed;
}
}
Ok(())
}
fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
self.unset_disambiguate_call();
match query {
ReflogLookup::Date(_date) => {
self.err.push(Error::Planned {
dependency: "remote handling and ref-specs are fleshed out more",
});
None
}
ReflogLookup::Entry(no) => {
let r = match &mut self.refs[self.idx] {
Some(r) => r.clone().attach(self.repo),
val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
Ok(Some(r)) => {
*val = Some(r.clone().detach());
r
}
Ok(None) => {
self.err.push(Error::UnbornHeadsHaveNoRefLog);
return None;
}
Err(err) => {
self.err.push(err.into());
return None;
}
},
};
let mut platform = r.log_iter();
match platform.rev().ok().flatten() {
Some(mut it) => match it.nth(no).and_then(Result::ok) {
Some(line) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(line.new_oid);
Some(())
}
None => {
let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
self.err.push(Error::RefLogEntryOutOfRange {
reference: r.detach(),
desired: no,
available,
});
None
}
},
None => {
self.err.push(Error::MissingRefLog {
reference: r.name().as_bstr().into(),
action: "lookup entry",
});
None
}
}
}
}
}
fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
let platform = repo.references()?;
Ok(match self {
SelectRef::AllTags | SelectRef::AllRefs => {
let mut refs: Vec<_> = match self {
SelectRef::AllRefs => platform.all()?,
SelectRef::AllTags => platform.tags()?,
_ => unreachable!(),
}
.filter_map(Result::ok)
.filter_map(|mut r: crate::Reference<'_>| {
let target_id = r.target().try_id().map(ToOwned::to_owned);
let peeled_id = r.peel_to_id_in_place().ok()?;
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
(1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
}
_ => (0, 0),
};
(
peeled_id.inner,
prio,
tag_time,
Cow::from(r.inner.name.shorten().to_owned()),
)
.into()
})
.collect();
// By priority, then by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
refs.sort_by(
|(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
a_prio
.cmp(b_prio)
.then_with(|| a_time.cmp(b_time))
.then_with(|| b_name.cmp(a_name))
},
);
refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
}
SelectRef::AnnotatedTags => {
let mut peeled_commits_and_tag_date: Vec<_> = platform
.tags()?
.filter_map(Result::ok)
.filter_map(|r: crate::Reference<'_>| {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
let tag_time = tag
.tagger()
.ok()
.and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
.unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
.collect();
// Sort by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
});
peeled_commits_and_tag_date
.into_iter()
.map(|(a, _, c)| (a, c))
.collect()
}
})
}
pub(crate) fn update(
repo: &Repository,
message: RefLogMessage,
mappings: &[fetch::Mapping],
refspecs: &[git_refspec::RefSpec],
extra_refspecs: &[git_refspec::RefSpec],
fetch_tags: fetch::Tags,
dry_run: fetch::DryRun,
write_packed_refs: fetch::WritePackedRefs,
) -> Result<update::Outcome, update::Error> {
let mut edits = Vec::new();
let mut updates = Vec::new();
let implicit_tag_refspec = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
|fetch::Mapping {
remote,
local,
spec_index,
}| {
spec_index.get(refspecs, extra_refspecs).map(|spec| {
(
remote,
local,
spec,
implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
)
})
},
) {
let remote_id = match remote.as_id() {
Some(id) => id,
None => continue,
};
if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
let update = if is_implicit_tag {
update::Mode::ImplicitTagNotSentByRemote.into()
} else {
update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
};
updates.push(update);
continue;
}
let checked_out_branches = worktree_branches(repo)?;
let (mode, edit_index) = match local {
Some(name) => {
let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
Some(existing) => {
if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
let mode = update::Mode::RejectedCurrentlyCheckedOut {
worktree_dir: wt_dir.to_owned(),
};
updates.push(mode.into());
continue;
}
match existing.target() {
TargetRef::Symbolic(_) => {
updates.push(update::Mode::RejectedSymbolic.into());
continue;
}
TargetRef::Peeled(local_id) => {
let previous_value =
PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
let (mode, reflog_message) = if local_id == remote_id {
(update::Mode::NoChangeNeeded, "no update will be performed")
} else if let Some(git_ref::Category::Tag) = existing.name().category() {
if spec.allow_non_fast_forward() {
(update::Mode::Forced, "updating tag")
} else {
updates.push(update::Mode::RejectedTagUpdate.into());
continue;
}
} else {
let mut force = spec.allow_non_fast_forward();
let is_fast_forward = match dry_run {
fetch::DryRun::No => {
let ancestors = repo
.find_object(local_id)?
.try_into_commit()
.map_err(|_| ())
.and_then(|c| {
c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
}).and_then(|local_commit_time|
remote_id
.to_owned()
.ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
.sorting(
git_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
time_in_seconds_since_epoch: local_commit_time
},
)
.map_err(|_| ())
);
match ancestors {
Ok(mut ancestors) => {
ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
}
Err(_) => {
force = true;
false
}
}
}
fetch::DryRun::Yes => true,
};
if is_fast_forward {
(
update::Mode::FastForward,
matches!(dry_run, fetch::DryRun::Yes)
.then(|| "fast-forward (guessed in dry-run)")
.unwrap_or("fast-forward"),
)
} else if force {
(update::Mode::Forced, "forced-update")
} else {
updates.push(update::Mode::RejectedNonFastForward.into());
continue;
}
};
(mode, reflog_message, existing.name().to_owned(), previous_value)
}
}
}
None => {
let name: git_ref::FullName = name.try_into()?;
let reflog_msg = match name.category() {
Some(git_ref::Category::Tag) => "storing tag",
Some(git_ref::Category::LocalBranch) => "storing head",
_ => "storing ref",
};
(
update::Mode::New,
reflog_msg,
name,
PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
)
}
};
let edit = RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: message.compose(reflog_message),
},
expected: previous_value,
new: if let Source::Ref(git_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
match mappings.iter().find_map(|m| {
m.remote.as_name().and_then(|name| {
(name == target)
.then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
.flatten()
})
}) {
Some(local_branch) => {
// This is always safe because…
// - the reference may exist already
// - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
// - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
// target reference still exists and we can point to it.
Target::Symbolic(local_branch)
}
None => Target::Peeled(remote_id.into()),
}
} else {
Target::Peeled(remote_id.into())
},
},
name,
deref: false,
};
let edit_index = edits.len();
edits.push(edit);
(mode, Some(edit_index))
}
None => (update::Mode::NoChangeNeeded, None),
};
updates.push(Update { mode, edit_index })
}
let edits = match dry_run {
fetch::DryRun::No => {
let (file_lock_fail, packed_refs_lock_fail) = repo
.config
.lock_timeout()
.map_err(crate::reference::edit::Error::from)?;
repo.refs
.transaction()
.packed_refs(
match write_packed_refs {
fetch::WritePackedRefs::Only => {
git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}))},
fetch::WritePackedRefs::Never => git_ref::file::transaction::PackedRefs::DeletionsOnly
}
)
.prepare(edits, file_lock_fail, packed_refs_lock_fail)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?
}
fetch::DryRun::Yes => edits,
};
Ok(update::Outcome { edits, updates })
}
pub fn detach(self) -> Reference
Turn this instance into a stand-alone reference.
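A sketch of detaching so the reference can be stored without borrowing the Repository; the git alias and the refs re-export path for the detached type are assumptions, and re-attaching via attach(repo) is shown in the examples below:

use git_repository as git;

fn remember(reference: git::Reference<'_>) -> git::refs::Reference {
    // The detached form owns its data and no longer borrows the Repository.
    reference.detach()
}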
Examples found in repository
pub fn head(&self) -> Result<crate::Head<'_>, reference::find::existing::Error> {
let head = self.find_reference("HEAD")?;
Ok(match head.inner.target {
Target::Symbolic(branch) => match self.find_reference(&branch) {
Ok(r) => crate::head::Kind::Symbolic(r.detach()),
Err(reference::find::existing::Error::NotFound) => crate::head::Kind::Unborn(branch),
Err(err) => return Err(err),
},
Target::Peeled(target) => crate::head::Kind::Detached {
target,
peeled: head.inner.peeled,
},
}
.attach(self))
}
More examples
pub fn peel_to_id_in_place(&mut self) -> Option<Result<crate::Id<'repo>, Error>> {
Some(match &mut self.kind {
Kind::Unborn(_name) => return None,
Kind::Detached {
peeled: Some(peeled), ..
} => Ok((*peeled).attach(self.repo)),
Kind::Detached { peeled: None, target } => {
match target
.attach(self.repo)
.object()
.map_err(Into::into)
.and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
.map(|peeled| peeled.id)
{
Ok(peeled) => {
self.kind = Kind::Detached {
peeled: Some(peeled),
target: *target,
};
Ok(peeled.attach(self.repo))
}
Err(err) => Err(err),
}
}
Kind::Symbolic(r) => {
let mut nr = r.clone().attach(self.repo);
let peeled = nr.peel_to_id_in_place().map_err(Into::into);
*r = nr.detach();
peeled
}
})
}
fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
self.unset_disambiguate_call();
match query {
ReflogLookup::Date(_date) => {
self.err.push(Error::Planned {
dependency: "remote handling and ref-specs are fleshed out more",
});
None
}
ReflogLookup::Entry(no) => {
let r = match &mut self.refs[self.idx] {
Some(r) => r.clone().attach(self.repo),
val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
Ok(Some(r)) => {
*val = Some(r.clone().detach());
r
}
Ok(None) => {
self.err.push(Error::UnbornHeadsHaveNoRefLog);
return None;
}
Err(err) => {
self.err.push(err.into());
return None;
}
},
};
let mut platform = r.log_iter();
match platform.rev().ok().flatten() {
Some(mut it) => match it.nth(no).and_then(Result::ok) {
Some(line) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(line.new_oid);
Some(())
}
None => {
let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
self.err.push(Error::RefLogEntryOutOfRange {
reference: r.detach(),
desired: no,
available,
});
None
}
},
None => {
self.err.push(Error::MissingRefLog {
reference: r.name().as_bstr().into(),
action: "lookup entry",
});
None
}
}
}
}
}
fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
self.unset_disambiguate_call();
fn prior_checkouts_iter<'a>(
platform: &'a mut git_ref::file::log::iter::Platform<'static, '_>,
) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
match platform.rev().ok().flatten() {
Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
line.message
.strip_prefix(b"checkout: moving from ")
.and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
.map(|from_branch| (from_branch.into(), line.previous_oid))
})),
None => Err(Error::MissingRefLog {
reference: "HEAD".into(),
action: "search prior checked out branch",
}),
}
}
let head = match self.repo.head() {
Ok(head) => head,
Err(err) => {
self.err.push(err.into());
return None;
}
};
match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
Ok(Some((ref_name, id))) => {
let id = match self.repo.find_reference(ref_name.as_bstr()) {
Ok(mut r) => {
let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
self.refs[self.idx] = Some(r.detach());
id
}
Err(_) => id,
};
self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
Some(())
}
Ok(None) => {
self.err.push(Error::PriorCheckoutOutOfRange {
desired: branch_no,
available: prior_checkouts_iter(&mut head.log_iter())
.map(|it| it.count())
.unwrap_or(0),
});
None
}
Err(err) => {
self.err.push(err);
None
}
}
}
fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
self.unset_disambiguate_call();
self.follow_refs_to_objects_if_needed()?;
#[cfg(not(feature = "regex"))]
let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
#[cfg(feature = "regex")]
let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
Ok(compiled) => {
let needs_regex = regex::escape(compiled.as_str()) != regex;
move |message: &BStr| -> bool {
if needs_regex {
compiled.is_match(message) ^ negated
} else {
message.contains_str(regex) ^ negated
}
}
}
Err(err) => {
self.err.push(err.into());
return None;
}
};
match self.objs[self.idx].as_mut() {
Some(objs) => {
let repo = self.repo;
let mut errors = Vec::new();
let mut replacements = Replacements::default();
for oid in objs.iter() {
match oid
.attach(repo)
.ancestors()
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
replacements.push((*oid, commit.id));
matched = true;
break;
}
}
Err(err) => errors.push((*oid, err)),
}
}
if !matched {
errors.push((
*oid,
Error::NoRegexMatch {
regex: regex.into(),
commits_searched: count,
oid: oid.attach(repo).shorten_or_id(),
},
))
}
}
Err(err) => errors.push((*oid, err.into())),
}
}
handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
}
None => match self.repo.references() {
Ok(references) => match references.all() {
Ok(references) => {
match self
.repo
.rev_walk(
references
.peeled()
.filter_map(Result::ok)
.filter(|r| {
r.id()
.object()
.ok()
.map(|obj| obj.kind == git_object::Kind::Commit)
.unwrap_or(false)
})
.filter_map(|r| r.detach().peeled),
)
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(commit.id);
matched = true;
break;
}
}
Err(err) => self.err.push(err),
}
}
if matched {
Some(())
} else {
self.err.push(Error::NoRegexMatchAllRefs {
regex: regex.into(),
commits_searched: count,
});
None
}
}
Err(err) => {
self.err.push(err.into());
None
}
}
}
Err(err) => {
self.err.push(err.into());
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
},
}
}
impl<'repo> Reference<'repo>
pub fn peel_to_id_in_place(&mut self) -> Result<Id<'repo>, Error>
Follow all symbolic targets this reference might point to and peel the underlying object to the end of the chain, and return it.
This is useful to learn where this reference is ultimately pointing to.
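A compact sketch: resolve a reference by name and peel it to the id of the object it ultimately points to (the name is illustrative, the git alias an assumption):

use git_repository as git;

fn tip_of(repo: &git::Repository, name: &str) -> Option<git::ObjectId> {
    let mut reference = repo.find_reference(name).ok()?;
    // Follows symbolic refs and peels tag objects until an object id is reached.
    reference.peel_to_id_in_place().ok().map(|id| id.detach())
}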
Examples found in repository
fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> {
assert_eq!(self.refs.len(), self.objs.len());
let repo = self.repo;
for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) {
if let (_ref_opt @ Some(ref_), obj_opt @ None) = (r, obj) {
if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| {
ref_.clone()
.attach(repo)
.peel_to_id_in_place()
.ok()
.map(|id| id.detach())
}) {
obj_opt.get_or_insert_with(HashSet::default).insert(id);
};
};
}
Some(())
}
pub fn peel_to_id_in_place(&mut self) -> Option<Result<crate::Id<'repo>, Error>> {
Some(match &mut self.kind {
Kind::Unborn(_name) => return None,
Kind::Detached {
peeled: Some(peeled), ..
} => Ok((*peeled).attach(self.repo)),
Kind::Detached { peeled: None, target } => {
match target
.attach(self.repo)
.object()
.map_err(Into::into)
.and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
.map(|peeled| peeled.id)
{
Ok(peeled) => {
self.kind = Kind::Detached {
peeled: Some(peeled),
target: *target,
};
Ok(peeled.attach(self.repo))
}
Err(err) => Err(err),
}
}
Kind::Symbolic(r) => {
let mut nr = r.clone().attach(self.repo);
let peeled = nr.peel_to_id_in_place().map_err(Into::into);
*r = nr.detach();
peeled
}
})
}
// TODO: tests
// TODO: something similar in `crate::Reference`
/// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no
/// more object to follow, transform the id into a commit if possible and return that.
///
/// Returns an error if the head is unborn or if it doesn't point to a commit.
pub fn peel_to_commit_in_place(&mut self) -> Result<crate::Commit<'repo>, to_commit::Error> {
let id = self.peel_to_id_in_place().ok_or_else(|| to_commit::Error::Unborn {
name: self.referent_name().expect("unborn").to_owned(),
})??;
id.object()
.map_err(|err| to_commit::Error::Peel(Error::FindExistingObject(err)))
.and_then(|object| object.try_into_commit().map_err(Into::into))
}
/// Consume this instance and transform it into the final object that it points to, or `None` if the `HEAD`
/// reference is yet to be born.
pub fn into_fully_peeled_id(self) -> Option<Result<crate::Id<'repo>, Error>> {
Some(match self.kind {
Kind::Unborn(_name) => return None,
Kind::Detached {
peeled: Some(peeled), ..
} => Ok(peeled.attach(self.repo)),
Kind::Detached { peeled: None, target } => target
.attach(self.repo)
.object()
.map_err(Into::into)
.and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
.map(|obj| obj.id.attach(self.repo)),
Kind::Symbolic(r) => r.attach(self.repo).peel_to_id_in_place().map_err(Into::into),
})
}
fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
self.unset_disambiguate_call();
fn prior_checkouts_iter<'a>(
platform: &'a mut git_ref::file::log::iter::Platform<'static, '_>,
) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
match platform.rev().ok().flatten() {
Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
line.message
.strip_prefix(b"checkout: moving from ")
.and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
.map(|from_branch| (from_branch.into(), line.previous_oid))
})),
None => Err(Error::MissingRefLog {
reference: "HEAD".into(),
action: "search prior checked out branch",
}),
}
}
let head = match self.repo.head() {
Ok(head) => head,
Err(err) => {
self.err.push(err.into());
return None;
}
};
match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
Ok(Some((ref_name, id))) => {
let id = match self.repo.find_reference(ref_name.as_bstr()) {
Ok(mut r) => {
let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
self.refs[self.idx] = Some(r.detach());
id
}
Err(_) => id,
};
self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
Some(())
}
Ok(None) => {
self.err.push(Error::PriorCheckoutOutOfRange {
desired: branch_no,
available: prior_checkouts_iter(&mut head.log_iter())
.map(|it| it.count())
.unwrap_or(0),
});
None
}
Err(err) => {
self.err.push(err);
None
}
}
}
fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
let platform = repo.references()?;
Ok(match self {
SelectRef::AllTags | SelectRef::AllRefs => {
let mut refs: Vec<_> = match self {
SelectRef::AllRefs => platform.all()?,
SelectRef::AllTags => platform.tags()?,
_ => unreachable!(),
}
.filter_map(Result::ok)
.filter_map(|mut r: crate::Reference<'_>| {
let target_id = r.target().try_id().map(ToOwned::to_owned);
let peeled_id = r.peel_to_id_in_place().ok()?;
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
(1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
}
_ => (0, 0),
};
(
peeled_id.inner,
prio,
tag_time,
Cow::from(r.inner.name.shorten().to_owned()),
)
.into()
})
.collect();
// By priority, then by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
refs.sort_by(
|(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
a_prio
.cmp(b_prio)
.then_with(|| a_time.cmp(b_time))
.then_with(|| b_name.cmp(a_name))
},
);
refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
}
SelectRef::AnnotatedTags => {
let mut peeled_commits_and_tag_date: Vec<_> = platform
.tags()?
.filter_map(Result::ok)
.filter_map(|r: crate::Reference<'_>| {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
let tag_time = tag
.tagger()
.ok()
.and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
.unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
.collect();
// Sort by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
});
peeled_commits_and_tag_date
.into_iter()
.map(|(a, _, c)| (a, c))
.collect()
}
})
}
pub fn into_fully_peeled_id(self) -> Result<Id<'repo>, Error>
Similar to peel_to_id_in_place(), but consumes this instance.
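Because it takes self by value, this variant slots into iterator chains over owned references, as in this sketch over the tag iterator shown in the examples above (the git alias is assumed and errors are discarded):

use git_repository as git;

fn peeled_tag_ids(repo: &git::Repository) -> Option<Vec<git::ObjectId>> {
    let platform = repo.references().ok()?;
    let ids = platform
        .tags()
        .ok()?
        .filter_map(Result::ok)
        // Each item is an owned Reference, so the consuming form fits naturally.
        .filter_map(|r| r.into_fully_peeled_id().ok())
        .map(|id| id.detach())
        .collect();
    Some(ids)
}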