Struct git_repository::Repository
A thread-local handle to interact with a repository from a single thread.
It is Send but not Sync; for the latter you can convert it with into_sync().
Note that it clones itself so that the clone is empty, requiring the user to configure each clone separately, specifically and explicitly. This keeps the fastest-possible configuration as the default while allowing those who experiment with workloads to get speed boosts of 2x or more.
Fields
refs: RefStore
A ref store with shared ownership (or the equivalent of it).
objects: OdbHandle
A way to access objects.
Implementations
impl Repository
Configure how caches are used to speed up various git repository operations
pub fn object_cache_size(&mut self, bytes: impl Into<Option<usize>>)
Sets the amount of space used at most for caching the most recently accessed, fully decoded objects to Some(bytes), or None to deactivate the cache entirely.
Note that it is unset by default but can be enabled once there is time for performance optimization. Well-chosen cache sizes can improve performance particularly if objects are accessed multiple times in a row. The cache is configured to grow gradually.
Note that an application-level cache should be considered as well, as the best object access is not doing one at all.
pub fn object_cache_size_if_unset(&mut self, bytes: usize)
Set an object cache of size bytes if none is set.
Use this method to avoid overwriting any existing value while assuring better performance in case no value is set.
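As a rough sketch of how these two methods compose, consider the following; git_repository::open() at the crate root and the concrete cache sizes are assumptions for illustration, not recommendations.
fn configure_caches() -> Result<(), Box<dyn std::error::Error>> {
    // Open a repository in the current directory (hypothetical path).
    let mut repo = git_repository::open(".")?;
    // Reserve about 64 MiB for the most recently used, fully decoded objects.
    // Passing `None` instead would deactivate the cache entirely.
    repo.object_cache_size(64 * 1024 * 1024);
    // In library code that shouldn't override a caller's choice, only set a
    // cache if none was configured before.
    repo.object_cache_size_if_unset(4 * 1024 * 1024);
    Ok(())
}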
impl Repository
pub fn transport_options<'a>(
    &self,
    url: impl Into<&'a BStr>,
    remote_name: Option<&BStr>
) -> Result<Option<Box<dyn Any>>, Error>
Available on crate features blocking-network-client or async-network-client only.
Produce configuration suitable for url, as differentiated by its protocol/scheme, to be passed to a transport instance via [configure()][git_transport::client::TransportWithoutIO::configure()] (via &**config to pass the contained Any and not the Box).
None is returned if there is no known configuration. If remote_name is not None, the remote’s name may contribute to configuration overrides, typically for the HTTP transport.
Note that the caller may cast the instance themselves to modify it before passing it on.
For transports that support proxy authentication, the default authentication method will be used with the url of the proxy if it contains a user name.
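A minimal sketch of the flow described above, assuming one of the network-client features is enabled; the URL is a stand-in and the actual transport handling is elided.
fn lookup_transport_config(repo: &git_repository::Repository) -> Result<(), Box<dyn std::error::Error>> {
    // `None` means no remote name contributes to configuration overrides.
    if let Some(config) = repo.transport_options("https://example.com/repo.git", None)? {
        // `config` is a `Box<dyn Any>`; dereference once so a connected transport's
        // `configure()` receives the contained `Any` rather than the `Box`.
        let any: &dyn std::any::Any = &*config;
        let _ = any;
    }
    Ok(())
}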
Examples found in repository
async fn fetch_refs(
&mut self,
filter_by_prefix: bool,
extra_parameters: Vec<(String, Option<String>)>,
refspecs: &[git_refspec::RefSpec],
) -> Result<HandshakeWithRefs, Error> {
let mut credentials_storage;
let url = self.transport.to_url();
let authenticate = match self.authenticate.as_mut() {
Some(f) => f,
None => {
let url = self
.remote
.url(Direction::Fetch)
.map(ToOwned::to_owned)
.unwrap_or_else(|| git_url::parse(url.as_ref()).expect("valid URL to be provided by transport"));
credentials_storage = self.configured_credentials(url)?;
&mut credentials_storage
}
};
if self.transport_options.is_none() {
self.transport_options = self
.remote
.repo
.transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
.map_err(|err| Error::GatherTransportConfig {
source: err,
url: url.into_owned(),
})?;
}
if let Some(config) = self.transport_options.as_ref() {
self.transport.configure(&**config)?;
}
let mut outcome =
git_protocol::fetch::handshake(&mut self.transport, authenticate, extra_parameters, &mut self.progress)
.await?;
let refs = match outcome.refs.take() {
Some(refs) => refs,
None => {
let agent_feature = self.remote.repo.config.user_agent_tuple();
git_protocol::ls_refs(
&mut self.transport,
&outcome.capabilities,
move |_capabilities, arguments, features| {
features.push(agent_feature);
if filter_by_prefix {
let mut seen = HashSet::new();
for spec in refspecs {
let spec = spec.to_ref();
if seen.insert(spec.instruction()) {
let mut prefixes = Vec::with_capacity(1);
spec.expand_prefixes(&mut prefixes);
for mut prefix in prefixes {
prefix.insert_str(0, "ref-prefix ");
arguments.push(prefix);
}
}
}
}
Ok(git_protocol::ls_refs::Action::Continue)
},
&mut self.progress,
)
.await?
}
};
Ok(HandshakeWithRefs { outcome, refs })
}
impl Repository
pub fn remote_names(&self) -> BTreeSet<&str>
Returns a sorted set of unique symbolic names of remotes that we deem trustworthy.
Examples found in repository
pub fn remote_default_name(&self, direction: remote::Direction) -> Option<Cow<'_, str>> {
let name = (direction == remote::Direction::Push)
.then(|| {
self.config
.resolved
.string_filter("remote", None, "pushDefault", &mut self.filter_config_section())
.and_then(|s| match s {
Cow::Borrowed(s) => s.to_str().ok().map(Cow::Borrowed),
Cow::Owned(s) => s.to_str().ok().map(|s| Cow::Owned(s.into())),
})
})
.flatten();
name.or_else(|| {
let names = self.remote_names();
match names.len() {
0 => None,
1 => names.iter().next().copied().map(Cow::Borrowed),
_more_than_one => names.get("origin").copied().map(Cow::Borrowed),
}
})
}
pub fn remote_default_name(&self, direction: Direction) -> Option<Cow<'_, str>>
Obtain the branch-independent name for a remote for use in the given direction, or None if it could not be determined.
For fetching, use the only configured remote, or default to origin if it exists.
For pushing, use the remote.pushDefault trusted configuration key, or fall back to the rules for fetching.
Notes
It’s up to the caller to determine what to do if the current head is unborn or detached.
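A small sketch of how these two methods relate; the printing is purely illustrative.
fn show_remotes(repo: &git_repository::Repository) {
    // All trustworthy remote names, sorted and de-duplicated.
    for name in repo.remote_names() {
        println!("remote: {name}");
    }
    // The branch-independent default used for fetching, if it can be determined.
    match repo.remote_default_name(git_repository::remote::Direction::Fetch) {
        Some(name) => println!("default fetch remote: {name}"),
        None => println!("no default remote could be determined"),
    }
}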
impl Repository
pub fn branch_names(&self) -> BTreeSet<&str>
Return a set of unique short branch names for which custom configuration exists, if we deem them trustworthy.
pub fn branch_remote_ref<'a>(
    &self,
    short_branch_name: impl Into<&'a BStr>
) -> Option<Result<Cow<'_, FullNameRef>, ValidateNameError>>
Returns the validated reference on the remote associated with the given short_branch_name, always main instead of refs/heads/main.
The returned reference is the one we track on the remote side for merging and pushing.
Returns None if the remote reference was not found.
May return an error if the reference is invalid.
pub fn branch_remote_name<'a>(
    &self,
    short_branch_name: impl Into<&'a BStr>
) -> Option<Name<'_>>
Returns the unvalidated name of the remote associated with the given short_branch_name, typically main instead of refs/heads/main.
In some cases, the returned name will be a URL.
Returns None if the remote was not found or if the name contained ill-formed UTF-8.
See also Reference::remote_name() for a more type-safe version to be used when a Reference is available.
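A sketch of looking up tracking information for a short branch name; main is just an illustrative branch name.
fn show_tracking(repo: &git_repository::Repository) {
    // The full reference tracked on the remote side, e.g. for merging.
    if let Some(Ok(remote_ref)) = repo.branch_remote_ref("main") {
        println!("merge ref: {}", remote_ref.as_bstr());
    }
    // Whether a remote (or URL) is associated with the branch at all.
    match repo.branch_remote_name("main") {
        Some(_remote) => println!("'main' has an associated remote"),
        None => println!("no remote associated with 'main' or the name was ill-formed"),
    }
}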
impl Repository
General Configuration
pub fn config_snapshot(&self) -> Snapshot<'_>
Return a snapshot of the configuration as seen upon opening the repository.
pub fn config_snapshot_mut(&mut self) -> SnapshotMut<'_>
Return a mutable snapshot of the configuration as seen upon opening the repository, starting a transaction. When the returned instance is dropped, it is applied in full, even if the reason for the drop is an error.
Note that changes to the configuration are in-memory only and are observed only by this instance of the Repository.
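A read-only sketch of the snapshot API; the dotted key names are ordinary git configuration keys, and the typed accessors on Snapshot (string(), boolean()) are assumed here rather than taken from this page.
fn inspect_config(repo: &git_repository::Repository) {
    let config = repo.config_snapshot();
    // String-valued lookup by dotted key (assumed accessor name).
    if let Some(name) = config.string("user.name") {
        println!("user.name = {name}");
    }
    // Boolean lookup with the usual git value interpretation (assumed accessor name).
    if let Some(bare) = config.boolean("core.bare") {
        println!("core.bare = {bare}");
    }
}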
pub fn open_options(&self) -> &Options
The options used to open the repository.
pub fn object_hash(&self) -> Kind
The kind of object hash the repository is configured to use.
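A tiny sketch mirroring the usage in the examples below: the configured hash kind determines well-known ids such as the empty tree. It assumes the git_hash crate is available, as those examples do.
fn empty_tree_id(repo: &git_repository::Repository) -> git_hash::ObjectId {
    // The empty-tree id differs between SHA-1 and SHA-256 repositories.
    git_hash::ObjectId::empty_tree(repo.object_hash())
}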
Examples found in repository
pub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, object::find::existing::Error> {
let id = id.into();
if id == git_hash::ObjectId::empty_tree(self.object_hash()) {
return Ok(Object {
id,
kind: git_object::Kind::Tree,
data: Vec::new(),
repo: self,
});
}
let mut buf = self.free_buf();
let kind = self.objects.find(id, &mut buf)?.kind;
Ok(Object::from_data(id, kind, buf, self))
}
/// Try to find the object with `id` or return `None` if it wasn't found.
pub fn try_find_object(&self, id: impl Into<ObjectId>) -> Result<Option<Object<'_>>, object::find::Error> {
let id = id.into();
if id == git_hash::ObjectId::empty_tree(self.object_hash()) {
return Ok(Some(Object {
id,
kind: git_object::Kind::Tree,
data: Vec::new(),
repo: self,
}));
}
let mut buf = self.free_buf();
match self.objects.try_find(id, &mut buf)? {
Some(obj) => {
let kind = obj.kind;
Ok(Some(Object::from_data(id, kind, buf, self)))
}
None => Ok(None),
}
}
/// Write the given object into the object database and return its object id.
pub fn write_object(&self, object: impl git_object::WriteTo) -> Result<Id<'_>, object::write::Error> {
self.objects
.write(object)
.map(|oid| oid.attach(self))
.map_err(Into::into)
}
/// Write a blob from the given `bytes`.
pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, object::write::Error> {
self.objects
.write_buf(git_object::Kind::Blob, bytes.as_ref())
.map(|oid| oid.attach(self))
}
/// Write a blob from the given `Read` implementation.
pub fn write_blob_stream(
&self,
mut bytes: impl std::io::Read + std::io::Seek,
) -> Result<Id<'_>, object::write::Error> {
let current = bytes.stream_position()?;
let len = bytes.seek(std::io::SeekFrom::End(0))? - current;
bytes.seek(std::io::SeekFrom::Start(current))?;
self.objects
.write_stream(git_object::Kind::Blob, len, bytes)
.map(|oid| oid.attach(self))
}
/// Create a tag reference named `name` (without `refs/tags/` prefix) pointing to a newly created tag object
/// which in turn points to `target` and return the newly created reference.
///
/// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
/// or to [force overwriting a possibly existing tag](PreviousValue::Any).
pub fn tag(
&self,
name: impl AsRef<str>,
target: impl AsRef<git_hash::oid>,
target_kind: git_object::Kind,
tagger: Option<git_actor::SignatureRef<'_>>,
message: impl AsRef<str>,
constraint: PreviousValue,
) -> Result<Reference<'_>, tag::Error> {
let tag = git_object::Tag {
target: target.as_ref().into(),
target_kind,
name: name.as_ref().into(),
tagger: tagger.map(|t| t.to_owned()),
message: message.as_ref().into(),
pgp_signature: None,
};
let tag_id = self.write_object(&tag)?;
self.tag_reference(name, tag_id, constraint).map_err(Into::into)
}
/// Similar to [`commit(…)`][crate::Repository::commit()], but allows to create the commit with `committer` and `author` specified.
///
/// This forces setting the commit time and author time by hand. Note that typically, committer and author are the same.
pub fn commit_as<'a, 'c, Name, E>(
&self,
committer: impl Into<git_actor::SignatureRef<'c>>,
author: impl Into<git_actor::SignatureRef<'a>>,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
use git_ref::{
transaction::{Change, RefEdit},
Target,
};
// TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway).
// This can be made vastly more efficient though if we wanted to, so we lie in the API
let reference = reference.try_into()?;
let commit = git_object::Commit {
message: message.as_ref().into(),
tree: tree.into(),
author: author.into().to_owned(),
committer: committer.into().to_owned(),
encoding: None,
parents: parents.into_iter().map(|id| id.into()).collect(),
extra_headers: Default::default(),
};
let commit_id = self.write_object(&commit)?;
self.edit_reference(RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()),
},
expected: match commit.parents.first().map(|p| Target::Peeled(*p)) {
Some(previous) => {
if reference.as_bstr() == "HEAD" {
PreviousValue::MustExistAndMatch(previous)
} else {
PreviousValue::ExistingMustMatch(previous)
}
}
None => PreviousValue::MustNotExist,
},
new: Target::Peeled(commit_id.inner),
},
name: reference,
deref: true,
})?;
Ok(commit_id)
}
/// Create a new commit object with `message` referring to `tree` with `parents`, and point `reference`
/// to it. The commit is written without message encoding field, which can be assumed to be UTF-8.
/// `author` and `committer` fields are pre-set from the configuration, which can be altered
/// [temporarily][crate::Repository::config_snapshot_mut()] before the call if required.
///
/// `reference` will be created if it doesn't exist, and can be `"HEAD"` to automatically write-through to the symbolic reference
/// that `HEAD` points to if it is not detached. For this reason, detached head states cannot be created unless the `HEAD` is detached
/// already. The reflog will be written as canonical git would do, like `<operation> (<detail>): <summary>`.
///
/// The first parent id in `parents` is expected to be the current target of `reference` and the operation will fail if it is not.
/// If there is no parent, the `reference` is expected to not exist yet.
///
/// The method fails immediately if a `reference` lock can't be acquired.
pub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
let author = self.author_or_default();
let committer = self.committer_or_default();
self.commit_as(committer, author, reference, message, tree, parents)
}
/// Return an empty tree object, suitable for [getting changes](crate::Tree::changes()).
///
/// Note that it is special and doesn't physically exist in the object database even though it can be returned.
/// This means that this object can be used in an uninitialized, empty repository which would report to have no objects at all.
pub fn empty_tree(&self) -> Tree<'_> {
self.find_object(git_hash::ObjectId::empty_tree(self.object_hash()))
.expect("always present")
.into_tree()
}
More examples
pub fn open_index(&self) -> Result<git_index::File, worktree::open_index::Error> {
let thread_limit = self
.config
.resolved
.boolean("index", None, "threads")
.map(|res| {
res.map(|value| usize::from(!value)).or_else(|err| {
git_config::Integer::try_from(err.input.as_ref())
.map_err(|err| worktree::open_index::Error::ConfigIndexThreads {
value: err.input.clone(),
err,
})
.map(|value| value.to_decimal().and_then(|v| v.try_into().ok()).unwrap_or(1))
})
})
.transpose()?;
git_index::File::at(
self.index_path(),
self.object_hash(),
git_index::decode::Options {
thread_limit,
min_extension_block_in_bytes_for_threading: 0,
},
)
.map_err(Into::into)
}
fn setup_branch_config(
repo: &mut Repository,
branch: &FullNameRef,
branch_id: Option<&git_hash::oid>,
remote_name: &BStr,
) -> Result<(), Error> {
let short_name = match branch.category_and_short_name() {
Some((cat, shortened)) if cat == git_ref::Category::LocalBranch => match shortened.to_str() {
Ok(s) => s,
Err(_) => return Ok(()),
},
_ => return Ok(()),
};
let remote = repo
.find_remote(remote_name)
.expect("remote was just created and must be visible in config");
let group = git_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
let null = git_hash::ObjectId::null(repo.object_hash());
let res = group.match_remotes(
Some(git_refspec::match_group::Item {
full_ref_name: branch.as_bstr(),
target: branch_id.unwrap_or(&null),
object: None,
})
.into_iter(),
);
if !res.mappings.is_empty() {
let mut metadata = git_config::file::Metadata::from(git_config::Source::Local);
let config_path = remote.repo.git_dir().join("config");
metadata.path = Some(config_path.clone());
let mut config =
git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load");
let mut section = config
.new_section("branch", Some(Cow::Owned(short_name.into())))
.expect("section header name is always valid per naming rules, our input branch name is valid");
section.push("remote".try_into().expect("valid at compile time"), Some(remote_name));
section.push(
"merge".try_into().expect("valid at compile time"),
Some(branch.as_bstr()),
);
std::fs::write(config_path, config.to_bstring())?;
replace_changed_local_config_file(repo, config);
}
Ok(())
}
pub fn fetch_only<P>(
&mut self,
progress: P,
should_interrupt: &std::sync::atomic::AtomicBool,
) -> Result<(Repository, crate::remote::fetch::Outcome), Error>
where
P: crate::Progress,
P::SubProgress: 'static,
{
use crate::remote;
use crate::{bstr::ByteVec, remote::fetch::RefLogMessage};
let repo = self
.repo
.as_mut()
.expect("user error: multiple calls are allowed only until it succeeds");
let remote_name = match self.remote_name.as_ref() {
Some(name) => name.to_owned(),
None => repo
.config
.resolved
.string_by_key("clone.defaultRemoteName")
.map(|n| remote::name::validated(n.into_owned()))
.unwrap_or_else(|| Ok("origin".into()))?,
};
let mut remote = repo
.remote_at(self.url.clone())?
.with_refspecs(
Some(format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str()),
remote::Direction::Fetch,
)
.expect("valid static spec");
let mut clone_fetch_tags = None;
if let Some(f) = self.configure_remote.as_mut() {
remote = f(remote).map_err(|err| Error::RemoteConfiguration(err))?;
} else {
clone_fetch_tags = remote::fetch::Tags::All.into();
}
let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?;
// Now we are free to apply remote configuration we don't want to be written to disk.
if let Some(fetch_tags) = clone_fetch_tags {
remote = remote.with_fetch_tags(fetch_tags);
}
// Add HEAD after the remote was written to config, we need it to know what to checkout later, and assure
// the ref that HEAD points to is present no matter what.
let head_refspec = git_refspec::parse(
format!("HEAD:refs/remotes/{remote_name}/HEAD").as_str().into(),
git_refspec::parse::Operation::Fetch,
)
.expect("valid")
.to_owned();
let pending_pack: remote::fetch::Prepare<'_, '_, _, _> =
remote.connect(remote::Direction::Fetch, progress)?.prepare_fetch({
let mut opts = self.fetch_options.clone();
if !opts.extra_refspecs.contains(&head_refspec) {
opts.extra_refspecs.push(head_refspec)
}
opts
})?;
if pending_pack.ref_map().object_hash != repo.object_hash() {
unimplemented!("configure repository to expect a different object hash as advertised by the server")
}
let reflog_message = {
let mut b = self.url.to_bstring();
b.insert_str(0, "clone: from ");
b
};
let outcome = pending_pack
.with_write_packed_refs_only(true)
.with_reflog_message(RefLogMessage::Override {
message: reflog_message.clone(),
})
.receive(should_interrupt)?;
util::replace_changed_local_config_file(repo, config);
util::update_head(
repo,
&outcome.ref_map.remote_refs,
reflog_message.as_ref(),
remote_name.as_ref(),
)?;
Ok((self.repo.take().expect("still present"), outcome))
}
pub async fn receive(mut self, should_interrupt: &AtomicBool) -> Result<Outcome, Error> {
let mut con = self.con.take().expect("receive() can only be called once");
let handshake = &self.ref_map.handshake;
let protocol_version = handshake.server_protocol_version;
let fetch = git_protocol::Command::Fetch;
let progress = &mut con.progress;
let repo = con.remote.repo;
let fetch_features = {
let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
f.push(repo.config.user_agent_tuple());
f
};
git_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?;
let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all");
let mut arguments = git_protocol::fetch::Arguments::new(protocol_version, fetch_features);
if matches!(con.remote.fetch_tags, crate::remote::fetch::Tags::Included) {
if !arguments.can_use_include_tag() {
unimplemented!("we expect servers to support 'include-tag', otherwise we have to implement another pass to fetch attached tags separately");
}
arguments.use_include_tag();
}
let mut previous_response = None::<git_protocol::fetch::Response>;
let mut round = 1;
if self.ref_map.object_hash != repo.object_hash() {
return Err(Error::IncompatibleObjectHash {
local: repo.object_hash(),
remote: self.ref_map.object_hash,
});
}
let reader = 'negotiation: loop {
progress.step();
progress.set_name(format!("negotiate (round {})", round));
let is_done = match negotiate::one_round(
negotiate::Algorithm::Naive,
round,
repo,
&self.ref_map,
con.remote.fetch_tags,
&mut arguments,
previous_response.as_ref(),
) {
Ok(_) if arguments.is_empty() => {
git_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
let update_refs = refs::update(
repo,
self.reflog_message
.take()
.unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
&self.ref_map.mappings,
con.remote.refspecs(remote::Direction::Fetch),
&self.ref_map.extra_refspecs,
con.remote.fetch_tags,
self.dry_run,
self.write_packed_refs,
)?;
return Ok(Outcome {
ref_map: std::mem::take(&mut self.ref_map),
status: Status::NoPackReceived { update_refs },
});
}
Ok(is_done) => is_done,
Err(err) => {
git_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
return Err(err.into());
}
};
round += 1;
let mut reader = arguments.send(&mut con.transport, is_done).await?;
if sideband_all {
setup_remote_progress(progress, &mut reader);
}
let response = git_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader).await?;
if response.has_pack() {
progress.step();
progress.set_name("receiving pack");
if !sideband_all {
setup_remote_progress(progress, &mut reader);
}
break 'negotiation reader;
} else {
previous_response = Some(response);
}
};
let options = git_pack::bundle::write::Options {
thread_limit: config::index_threads(repo)?,
index_version: config::pack_index_version(repo)?,
iteration_mode: git_pack::data::input::Mode::Verify,
object_hash: con.remote.repo.object_hash(),
};
let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
Some(git_pack::Bundle::write_to_directory(
#[cfg(feature = "async-network-client")]
{
git_protocol::futures_lite::io::BlockOn::new(reader)
},
#[cfg(not(feature = "async-network-client"))]
{
reader
},
Some(repo.objects.store_ref().path().join("pack")),
con.progress,
should_interrupt,
Some(Box::new({
let repo = repo.clone();
move |oid, buf| repo.objects.find(oid, buf).ok()
})),
options,
)?)
} else {
drop(reader);
None
};
if matches!(protocol_version, git_protocol::transport::Protocol::V2) {
git_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
}
let update_refs = refs::update(
repo,
self.reflog_message
.take()
.unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
&self.ref_map.mappings,
con.remote.refspecs(remote::Direction::Fetch),
&self.ref_map.extra_refspecs,
con.remote.fetch_tags,
self.dry_run,
self.write_packed_refs,
)?;
if let Some(bundle) = write_pack_bundle.as_mut() {
if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 {
if let Some(path) = bundle.keep_path.take() {
std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?;
}
}
}
Ok(Outcome {
ref_map: std::mem::take(&mut self.ref_map),
status: match write_pack_bundle {
Some(write_pack_bundle) => Status::Change {
write_pack_bundle,
update_refs,
},
None => Status::DryRun { update_refs },
},
})
}
impl Repository
Identity handling.
pub fn user_default(&self) -> SignatureRef<'_>
Return a crate-specific constant signature with Time set to now, or whatever was overridden via GIT_COMMITTER_TIME or GIT_AUTHOR_TIME if these variables are allowed to be read, in a similar vein as the default that git chooses if there is nothing configured.
This can be useful as a fallback for an unset committer or author.
Note
The values are cached when the repository is instantiated.
Examples found in repository
pub fn committer_or_default(&self) -> git_actor::SignatureRef<'_> {
self.committer().unwrap_or_else(|| self.user_default())
}
/// Return the author as configured by this repository, which is determined by…
///
/// * …the git configuration `author.name|email`…
/// * …the `GIT_AUTHOR_(NAME|EMAIL|DATE)` environment variables…
/// * …the configuration for `user.name|email` as fallback…
///
/// …and in that order, or `None` if there was nothing configured. In that case, one may use the
/// [`author_or_default()`][Self::author_or_default()] method.
///
/// # Note
///
/// The values are cached when the repository is instantiated.
pub fn author(&self) -> Option<git_actor::SignatureRef<'_>> {
let p = self.config.personas();
git_actor::SignatureRef {
name: p.author.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
email: p.author.email.as_ref().or(p.user.email.as_ref()).map(|v| v.as_ref())?,
time: p.author.time.unwrap_or_else(git_date::Time::now_local_or_utc),
}
.into()
}
/// Like [`author()`][Self::author()], but may use a default value in case nothing is configured.
pub fn author_or_default(&self) -> git_actor::SignatureRef<'_> {
self.author().unwrap_or_else(|| self.user_default())
}
pub fn committer(&self) -> Option<SignatureRef<'_>>
Return the committer as configured by this repository, which is determined by…
- …the git configuration committer.name|email…
- …the GIT_COMMITTER_(NAME|EMAIL|DATE) environment variables…
- …the configuration for user.name|email as fallback…
…and in that order, or None if there was nothing configured. In that case, one may use the committer_or_default() method.
Note
The values are cached when the repository is instantiated.
pub fn committer_or_default(&self) -> SignatureRef<'_>
Like committer(), but may use a default value in case nothing is configured.
Examples found in repository
More examples
pub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
let author = self.author_or_default();
let committer = self.committer_or_default();
self.commit_as(committer, author, reference, message, tree, parents)
}
pub fn update_head(
repo: &mut Repository,
remote_refs: &[git_protocol::handshake::Ref],
reflog_message: &BStr,
remote_name: &BStr,
) -> Result<(), Error> {
use git_ref::{
transaction::{PreviousValue, RefEdit},
Target,
};
let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| {
Some(match r {
git_protocol::handshake::Ref::Symbolic {
full_ref_name,
target,
object,
} if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
git_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
(Some(object.as_ref()), None)
}
git_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => {
(None, Some(target))
}
_ => return None,
})
}) {
Some(t) => t,
None => return Ok(()),
};
let head: git_ref::FullName = "HEAD".try_into().expect("valid");
let reflog_message = || LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: reflog_message.to_owned(),
};
match head_ref {
Some(referent) => {
let referent: git_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef {
head_ref_name: referent.to_owned(),
source: err,
})?;
repo.refs
.transaction()
.packed_refs(git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}),
))
.prepare(
{
let mut edits = vec![RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Symbolic(referent.clone()),
},
name: head.clone(),
deref: false,
}];
if let Some(head_peeled_id) = head_peeled_id {
edits.push(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: referent.clone(),
deref: false,
});
};
edits
},
git_lock::acquire::Fail::Immediately,
git_lock::acquire::Fail::Immediately,
)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?;
if let Some(head_peeled_id) = head_peeled_id {
let mut log = reflog_message();
log.mode = RefLog::Only;
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log,
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: head,
deref: false,
})?;
}
setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?;
}
None => {
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(
head_peeled_id
.expect("detached heads always point to something")
.to_owned(),
),
},
name: head,
deref: false,
})?;
}
};
Ok(())
}
pub(crate) fn update(
repo: &Repository,
message: RefLogMessage,
mappings: &[fetch::Mapping],
refspecs: &[git_refspec::RefSpec],
extra_refspecs: &[git_refspec::RefSpec],
fetch_tags: fetch::Tags,
dry_run: fetch::DryRun,
write_packed_refs: fetch::WritePackedRefs,
) -> Result<update::Outcome, update::Error> {
let mut edits = Vec::new();
let mut updates = Vec::new();
let implicit_tag_refspec = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
|fetch::Mapping {
remote,
local,
spec_index,
}| {
spec_index.get(refspecs, extra_refspecs).map(|spec| {
(
remote,
local,
spec,
implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
)
})
},
) {
let remote_id = match remote.as_id() {
Some(id) => id,
None => continue,
};
if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
let update = if is_implicit_tag {
update::Mode::ImplicitTagNotSentByRemote.into()
} else {
update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
};
updates.push(update);
continue;
}
let checked_out_branches = worktree_branches(repo)?;
let (mode, edit_index) = match local {
Some(name) => {
let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
Some(existing) => {
if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
let mode = update::Mode::RejectedCurrentlyCheckedOut {
worktree_dir: wt_dir.to_owned(),
};
updates.push(mode.into());
continue;
}
match existing.target() {
TargetRef::Symbolic(_) => {
updates.push(update::Mode::RejectedSymbolic.into());
continue;
}
TargetRef::Peeled(local_id) => {
let previous_value =
PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
let (mode, reflog_message) = if local_id == remote_id {
(update::Mode::NoChangeNeeded, "no update will be performed")
} else if let Some(git_ref::Category::Tag) = existing.name().category() {
if spec.allow_non_fast_forward() {
(update::Mode::Forced, "updating tag")
} else {
updates.push(update::Mode::RejectedTagUpdate.into());
continue;
}
} else {
let mut force = spec.allow_non_fast_forward();
let is_fast_forward = match dry_run {
fetch::DryRun::No => {
let ancestors = repo
.find_object(local_id)?
.try_into_commit()
.map_err(|_| ())
.and_then(|c| {
c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
}).and_then(|local_commit_time|
remote_id
.to_owned()
.ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
.sorting(
git_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
time_in_seconds_since_epoch: local_commit_time
},
)
.map_err(|_| ())
);
match ancestors {
Ok(mut ancestors) => {
ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
}
Err(_) => {
force = true;
false
}
}
}
fetch::DryRun::Yes => true,
};
if is_fast_forward {
(
update::Mode::FastForward,
matches!(dry_run, fetch::DryRun::Yes)
.then(|| "fast-forward (guessed in dry-run)")
.unwrap_or("fast-forward"),
)
} else if force {
(update::Mode::Forced, "forced-update")
} else {
updates.push(update::Mode::RejectedNonFastForward.into());
continue;
}
};
(mode, reflog_message, existing.name().to_owned(), previous_value)
}
}
}
None => {
let name: git_ref::FullName = name.try_into()?;
let reflog_msg = match name.category() {
Some(git_ref::Category::Tag) => "storing tag",
Some(git_ref::Category::LocalBranch) => "storing head",
_ => "storing ref",
};
(
update::Mode::New,
reflog_msg,
name,
PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
)
}
};
let edit = RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: message.compose(reflog_message),
},
expected: previous_value,
new: if let Source::Ref(git_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
match mappings.iter().find_map(|m| {
m.remote.as_name().and_then(|name| {
(name == target)
.then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
.flatten()
})
}) {
Some(local_branch) => {
// This is always safe because…
// - the reference may exist already
// - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
// - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
// target reference still exists and we can point to it.
Target::Symbolic(local_branch)
}
None => Target::Peeled(remote_id.into()),
}
} else {
Target::Peeled(remote_id.into())
},
},
name,
deref: false,
};
let edit_index = edits.len();
edits.push(edit);
(mode, Some(edit_index))
}
None => (update::Mode::NoChangeNeeded, None),
};
updates.push(Update { mode, edit_index })
}
let edits = match dry_run {
fetch::DryRun::No => {
let (file_lock_fail, packed_refs_lock_fail) = repo
.config
.lock_timeout()
.map_err(crate::reference::edit::Error::from)?;
repo.refs
.transaction()
.packed_refs(
match write_packed_refs {
fetch::WritePackedRefs::Only => {
git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}))},
fetch::WritePackedRefs::Never => git_ref::file::transaction::PackedRefs::DeletionsOnly
}
)
.prepare(edits, file_lock_fail, packed_refs_lock_fail)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?
}
fetch::DryRun::Yes => edits,
};
Ok(update::Outcome { edits, updates })
}
pub fn author(&self) -> Option<SignatureRef<'_>>
Return the author as configured by this repository, which is determined by…
- …the git configuration author.name|email…
- …the GIT_AUTHOR_(NAME|EMAIL|DATE) environment variables…
- …the configuration for user.name|email as fallback…
…and in that order, or None if there was nothing configured. In that case, one may use the author_or_default() method.
Note
The values are cached when the repository is instantiated.
pub fn author_or_default(&self) -> SignatureRef<'_>
Like author(), but may use a default value in case nothing is configured.
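A short sketch of the fallback chain documented above; the *_or_default() variants use the configured values and fall back to user_default() otherwise, as the extracted examples show.
fn show_identity(repo: &git_repository::Repository) {
    let author = repo.author_or_default();
    let committer = repo.committer_or_default();
    // `SignatureRef` exposes `name` and `email` as byte strings.
    println!("author: {} <{}>", author.name, author.email);
    println!("committer: {} <{}>", committer.name, committer.email);
}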
Examples found in repository
pub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
let author = self.author_or_default();
let committer = self.committer_or_default();
self.commit_as(committer, author, reference, message, tree, parents)
}
impl Repository
pub fn into_sync(self) -> ThreadSafeRepository
Convert this instance into a ThreadSafeRepository by dropping all thread-local data.
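A sketch of moving repository state across threads via the Sync form; to_thread_local() on ThreadSafeRepository is assumed here as the way back to a thread-local Repository.
fn share_across_threads(repo: git_repository::Repository) -> std::thread::JoinHandle<()> {
    // `ThreadSafeRepository` is `Send + Sync` and can be moved into other threads.
    let shared = repo.into_sync();
    std::thread::spawn(move || {
        // Obtain a per-thread `Repository` again (assumed method name).
        let local = shared.to_thread_local();
        let _ = local.object_hash();
    })
}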
impl Repository
pub fn git_dir(&self) -> &Path
Return the path to the repository itself, containing objects, references, configuration, and more.
Synonymous to path().
Examples found in repository
pub fn is_locked(&self) -> bool {
Proxy::new(self.parent, self.parent.git_dir()).is_locked()
}
/// Provide a reason for the locking of this worktree, if it is locked at all.
///
/// Note that we squelch errors in case the file cannot be read in which case the
/// reason is an empty string.
pub fn lock_reason(&self) -> Option<BString> {
Proxy::new(self.parent, self.parent.git_dir()).lock_reason()
}
/// Return the ID of the repository worktree, if it is a linked worktree, or `None` if it's the main worktree.
pub fn id(&self) -> Option<&BStr> {
id(self.parent.git_dir(), self.parent.common_dir.is_some())
}
}
pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BStr> {
if !has_common_dir {
return None;
}
let candidate = git_path::os_str_into_bstr(git_dir.file_name().expect("at least one directory level"))
.expect("no illformed UTF-8");
let maybe_worktrees = git_dir.parent()?;
(maybe_worktrees.file_name()?.to_str()? == "worktrees").then(|| candidate)
}
///
pub mod proxy;
///
pub mod open_index {
use crate::bstr::BString;
/// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
#[error("Could not interpret value '{}' as 'index.threads'", .value)]
ConfigIndexThreads {
value: BString,
#[source]
err: git_config::value::Error,
},
#[error(transparent)]
IndexFile(#[from] git_index::file::init::Error),
}
impl<'repo> crate::Worktree<'repo> {
/// A shortcut to [`crate::Repository::open_index()`].
pub fn open_index(&self) -> Result<git_index::File, Error> {
self.parent.open_index()
}
/// A shortcut to [`crate::Repository::index()`].
pub fn index(&self) -> Result<crate::worktree::Index, Error> {
self.parent.index()
}
}
}
///
pub mod excludes {
use std::path::PathBuf;
/// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
#[error("Could not read repository exclude.")]
Io(#[from] std::io::Error),
#[error(transparent)]
EnvironmentPermission(#[from] git_sec::permission::Error<PathBuf>),
#[error("The value for `core.excludesFile` could not be read from configuration")]
ExcludesFilePathInterpolation(#[from] git_config::path::interpolate::Error),
}
impl<'repo> crate::Worktree<'repo> {
/// Configure a file-system cache checking if files below the repository are excluded.
///
/// This takes into consideration all the usual repository configuration.
// TODO: test
pub fn excludes<'a>(
&self,
index: &'a git_index::State,
overrides: Option<git_attributes::MatchGroup<git_attributes::Ignore>>,
) -> Result<git_worktree::fs::Cache<'a>, Error> {
let repo = self.parent;
let case = repo
.config
.ignore_case
.then(|| git_glob::pattern::Case::Fold)
.unwrap_or_default();
let mut buf = Vec::with_capacity(512);
let excludes_file = match repo.config.excludes_file().transpose()? {
Some(user_path) => Some(user_path),
None => repo.config.xdg_config_path("ignore")?,
};
let state = git_worktree::fs::cache::State::IgnoreStack(git_worktree::fs::cache::state::Ignore::new(
overrides.unwrap_or_default(),
git_attributes::MatchGroup::<git_attributes::Ignore>::from_git_dir(
repo.git_dir(),
excludes_file,
&mut buf,
)?,
None,
case,
));
let attribute_list = state.build_attribute_list(index, index.path_backing(), case);
Ok(git_worktree::fs::Cache::new(
self.path,
state,
case,
buf,
attribute_list,
))
}
More examples
pub fn common_dir(&self) -> &std::path::Path {
self.common_dir.as_deref().unwrap_or_else(|| self.git_dir())
}
/// Return the path to the worktree index file, which may or may not exist.
pub fn index_path(&self) -> PathBuf {
self.git_dir().join("index")
}
/// The path to the `.git` directory itself, or equivalent if this is a bare repository.
pub fn path(&self) -> &std::path::Path {
self.git_dir()
}
/// Return the work tree containing all checked out files, if there is one.
pub fn work_dir(&self) -> Option<&std::path::Path> {
self.work_tree.as_deref()
}
// TODO: tests, respect precomposeUnicode
/// The directory of the binary path of the current process.
pub fn install_dir(&self) -> std::io::Result<PathBuf> {
crate::path::install_dir()
}
/// Returns the relative path which is the components between the working tree and the current working dir (CWD).
/// Note that there may be `None` if there is no work tree, even though the `PathBuf` will be empty
/// if the CWD is at the root of the work tree.
// TODO: tests, details - there is a lot about environment variables to change things around.
pub fn prefix(&self) -> Option<std::io::Result<PathBuf>> {
self.work_tree.as_ref().map(|root| {
std::env::current_dir().and_then(|cwd| {
git_path::realpath_opts(root, &cwd, MAX_SYMLINKS)
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
.and_then(|root| {
cwd.strip_prefix(&root)
.map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::Other,
format!(
"CWD '{}' isn't within the work tree '{}'",
cwd.display(),
root.display()
),
)
})
.map(ToOwned::to_owned)
})
})
})
}
/// Return the kind of repository, either bare or one with a work tree.
pub fn kind(&self) -> crate::Kind {
match self.worktree() {
Some(wt) => {
if git_discover::is_submodule_git_dir(self.git_dir()) {
crate::Kind::Submodule
} else {
crate::Kind::WorkTree {
is_linked: !wt.is_main(),
}
}
}
None => crate::Kind::Bare,
}
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Repository")
.field("kind", &self.kind())
.field("git_dir", &self.git_dir())
.field("work_dir", &self.work_dir())
.finish()
}
}
impl PartialEq<crate::Repository> for crate::Repository {
fn eq(&self, other: &crate::Repository) -> bool {
self.git_dir().canonicalize().ok() == other.git_dir().canonicalize().ok()
&& self.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
== other.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
}
pub fn write_remote_to_local_config_file(
remote: &mut crate::Remote<'_>,
remote_name: BString,
) -> Result<git_config::File<'static>, Error> {
let mut metadata = git_config::file::Metadata::from(git_config::Source::Local);
let config_path = remote.repo.git_dir().join("config");
metadata.path = Some(config_path.clone());
let mut config =
git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load");
remote.save_as_to(remote_name, &mut config)?;
std::fs::write(config_path, config.to_bstring())?;
Ok(config)
}
pub fn replace_changed_local_config_file(repo: &mut Repository, mut config: git_config::File<'static>) {
let repo_config = git_features::threading::OwnShared::make_mut(&mut repo.config.resolved);
let ids_to_remove: Vec<_> = repo_config
.sections_and_ids()
.filter_map(|(s, id)| {
matches!(s.meta().source, git_config::Source::Local | git_config::Source::Api).then(|| id)
})
.collect();
for id in ids_to_remove {
repo_config.remove_section_by_id(id);
}
crate::config::overrides::append(
&mut config,
&repo.options.api_config_overrides,
git_config::Source::Api,
|_| None,
)
.expect("applied once and can be applied again");
repo_config.append(config);
repo.reread_values_and_clear_caches()
.expect("values could be read once and can be read again");
}
/// HEAD cannot be written by means of refspec by design, so we have to do it manually here. Also create the pointed-to ref
/// if we have to, as it might not have been naturally included in the ref-specs.
pub fn update_head(
repo: &mut Repository,
remote_refs: &[git_protocol::handshake::Ref],
reflog_message: &BStr,
remote_name: &BStr,
) -> Result<(), Error> {
use git_ref::{
transaction::{PreviousValue, RefEdit},
Target,
};
let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| {
Some(match r {
git_protocol::handshake::Ref::Symbolic {
full_ref_name,
target,
object,
} if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
git_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
(Some(object.as_ref()), None)
}
git_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => {
(None, Some(target))
}
_ => return None,
})
}) {
Some(t) => t,
None => return Ok(()),
};
let head: git_ref::FullName = "HEAD".try_into().expect("valid");
let reflog_message = || LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: reflog_message.to_owned(),
};
match head_ref {
Some(referent) => {
let referent: git_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef {
head_ref_name: referent.to_owned(),
source: err,
})?;
repo.refs
.transaction()
.packed_refs(git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}),
))
.prepare(
{
let mut edits = vec![RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Symbolic(referent.clone()),
},
name: head.clone(),
deref: false,
}];
if let Some(head_peeled_id) = head_peeled_id {
edits.push(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: referent.clone(),
deref: false,
});
};
edits
},
git_lock::acquire::Fail::Immediately,
git_lock::acquire::Fail::Immediately,
)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?;
if let Some(head_peeled_id) = head_peeled_id {
let mut log = reflog_message();
log.mode = RefLog::Only;
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log,
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: head,
deref: false,
})?;
}
setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?;
}
None => {
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(
head_peeled_id
.expect("detached heads always point to something")
.to_owned(),
),
},
name: head,
deref: false,
})?;
}
};
Ok(())
}
/// Setup the remote configuration for `branch` so that it points to itself, but on the remote, if and only if the currently saved refspec
/// is able to match it.
/// For that we reload the remote of `remote_name` and use its ref_specs for match.
fn setup_branch_config(
repo: &mut Repository,
branch: &FullNameRef,
branch_id: Option<&git_hash::oid>,
remote_name: &BStr,
) -> Result<(), Error> {
let short_name = match branch.category_and_short_name() {
Some((cat, shortened)) if cat == git_ref::Category::LocalBranch => match shortened.to_str() {
Ok(s) => s,
Err(_) => return Ok(()),
},
_ => return Ok(()),
};
let remote = repo
.find_remote(remote_name)
.expect("remote was just created and must be visible in config");
let group = git_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
let null = git_hash::ObjectId::null(repo.object_hash());
let res = group.match_remotes(
Some(git_refspec::match_group::Item {
full_ref_name: branch.as_bstr(),
target: branch_id.unwrap_or(&null),
object: None,
})
.into_iter(),
);
if !res.mappings.is_empty() {
let mut metadata = git_config::file::Metadata::from(git_config::Source::Local);
let config_path = remote.repo.git_dir().join("config");
metadata.path = Some(config_path.clone());
let mut config =
git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load");
let mut section = config
.new_section("branch", Some(Cow::Owned(short_name.into())))
.expect("section header name is always valid per naming rules, our input branch name is valid");
section.push("remote".try_into().expect("valid at compile time"), Some(remote_name));
section.push(
"merge".try_into().expect("valid at compile time"),
Some(branch.as_bstr()),
);
std::fs::write(config_path, config.to_bstring())?;
replace_changed_local_config_file(repo, config);
}
Ok(())
}
pub fn main_worktree(
&mut self,
mut progress: impl crate::Progress,
should_interrupt: &AtomicBool,
) -> Result<(Repository, git_worktree::index::checkout::Outcome), Error> {
let repo = self
.repo
.as_ref()
.expect("still present as we never succeeded the worktree checkout yet");
let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository {
git_dir: repo.git_dir().to_owned(),
})?;
let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? {
Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id,
None => {
return Ok((
self.repo.take().expect("still present"),
git_worktree::index::checkout::Outcome::default(),
))
}
};
let index = git_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok())
.map_err(|err| Error::IndexFromTree {
id: root_tree,
source: err,
})?;
let mut index = git_index::File::from_state(index, repo.index_path());
let mut opts = repo.config.checkout_options(repo.git_dir())?;
opts.destination_is_initially_empty = true;
let mut files = progress.add_child_with_id("checkout", *b"CLCF"); /* CLone Checkout Files */
let mut bytes = progress.add_child_with_id("writing", *b"CLCB") /* CLone Checkout Bytes */;
files.init(Some(index.entries().len()), crate::progress::count("files"));
bytes.init(None, crate::progress::bytes());
let start = std::time::Instant::now();
let outcome = git_worktree::index::checkout(
&mut index,
workdir,
{
let objects = repo.objects.clone().into_arc()?;
move |oid, buf| objects.find_blob(oid, buf)
},
&mut files,
&mut bytes,
should_interrupt,
opts,
)?;
files.show_throughput(start);
bytes.show_throughput(start);
index.write(Default::default())?;
Ok((self.repo.take().expect("still present"), outcome))
}
pub fn git_dir_trust(&self) -> Trust
The trust we place in the git-dir, with lower amounts of trust causing access to configuration to be limited.
pub fn common_dir(&self) -> &Path
Returns the directory of the main git repository if this is a repository on a linked work-tree, or the git_dir itself.
Examples found in repository
pub fn worktrees(&self) -> std::io::Result<Vec<worktree::Proxy<'_>>> {
let mut res = Vec::new();
let iter = match std::fs::read_dir(self.common_dir().join("worktrees")) {
Ok(iter) => iter,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(res),
Err(err) => return Err(err),
};
for entry in iter {
let entry = entry?;
let worktree_git_dir = entry.path();
if worktree_git_dir.join("gitdir").is_file() {
res.push(worktree::Proxy {
parent: self,
git_dir: worktree_git_dir,
})
}
}
res.sort_by(|a, b| a.git_dir.cmp(&b.git_dir));
Ok(res)
}
}
/// Interact with individual worktrees and their information.
impl crate::Repository {
/// Return the repository owning the main worktree, typically from a linked worktree.
///
/// Note that it might be the one that is currently open if this repository doesn't point to a linked worktree.
/// Also note that the main repo might be bare.
pub fn main_repo(&self) -> Result<crate::Repository, crate::open::Error> {
crate::ThreadSafeRepository::open_opts(self.common_dir(), self.options.clone()).map(Into::into)
}
sourcepub fn index_path(&self) -> PathBuf
pub fn index_path(&self) -> PathBuf
Return the path to the worktree index file, which may or may not exist.
Examples found in repository?
pub fn open_index(&self) -> Result<git_index::File, worktree::open_index::Error> {
let thread_limit = self
.config
.resolved
.boolean("index", None, "threads")
.map(|res| {
res.map(|value| usize::from(!value)).or_else(|err| {
git_config::Integer::try_from(err.input.as_ref())
.map_err(|err| worktree::open_index::Error::ConfigIndexThreads {
value: err.input.clone(),
err,
})
.map(|value| value.to_decimal().and_then(|v| v.try_into().ok()).unwrap_or(1))
})
})
.transpose()?;
git_index::File::at(
self.index_path(),
self.object_hash(),
git_index::decode::Options {
thread_limit,
min_extension_block_in_bytes_for_threading: 0,
},
)
.map_err(Into::into)
}
/// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
/// on disk has changed.
///
/// The index file is shared across all clones of this repository.
pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
self.index
.recent_snapshot(
|| self.index_path().metadata().and_then(|m| m.modified()).ok(),
|| {
self.open_index().map(Some).or_else(|err| match err {
worktree::open_index::Error::IndexFile(git_index::file::init::Error::Io(err))
if err.kind() == std::io::ErrorKind::NotFound =>
{
Ok(None)
}
err => Err(err),
})
},
)
.and_then(|opt| match opt {
Some(index) => Ok(index),
None => Err(worktree::open_index::Error::IndexFile(
git_index::file::init::Error::Io(std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("Could not find index file at {:?} for opening.", self.index_path()),
)),
)),
})
}
More examples
pub fn main_worktree(
&mut self,
mut progress: impl crate::Progress,
should_interrupt: &AtomicBool,
) -> Result<(Repository, git_worktree::index::checkout::Outcome), Error> {
let repo = self
.repo
.as_ref()
.expect("still present as we never succeeded the worktree checkout yet");
let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository {
git_dir: repo.git_dir().to_owned(),
})?;
let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? {
Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id,
None => {
return Ok((
self.repo.take().expect("still present"),
git_worktree::index::checkout::Outcome::default(),
))
}
};
let index = git_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok())
.map_err(|err| Error::IndexFromTree {
id: root_tree,
source: err,
})?;
let mut index = git_index::File::from_state(index, repo.index_path());
let mut opts = repo.config.checkout_options(repo.git_dir())?;
opts.destination_is_initially_empty = true;
let mut files = progress.add_child_with_id("checkout", *b"CLCF"); /* CLone Checkout Files */
let mut bytes = progress.add_child_with_id("writing", *b"CLCB") /* CLone Checkout Bytes */;
files.init(Some(index.entries().len()), crate::progress::count("files"));
bytes.init(None, crate::progress::bytes());
let start = std::time::Instant::now();
let outcome = git_worktree::index::checkout(
&mut index,
workdir,
{
let objects = repo.objects.clone().into_arc()?;
move |oid, buf| objects.find_blob(oid, buf)
},
&mut files,
&mut bytes,
should_interrupt,
opts,
)?;
files.show_throughput(start);
bytes.show_throughput(start);
index.write(Default::default())?;
Ok((self.repo.take().expect("still present"), outcome))
}
sourcepub fn path(&self) -> &Path
pub fn path(&self) -> &Path
The path to the .git
directory itself, or equivalent if this is a bare repository.
Examples found in repository?
pub fn state(&self) -> Option<state::InProgress> {
let git_dir = self.path();
// This is modeled on the logic from wt_status_get_state in git's wt-status.c and
// ps1 from git-prompt.sh.
if git_dir.join("rebase-apply/applying").is_file() {
Some(state::InProgress::ApplyMailbox)
} else if git_dir.join("rebase-apply/rebasing").is_file() {
Some(state::InProgress::Rebase)
} else if git_dir.join("rebase-apply").is_dir() {
Some(state::InProgress::ApplyMailboxRebase)
} else if git_dir.join("rebase-merge/interactive").is_file() {
Some(state::InProgress::RebaseInteractive)
} else if git_dir.join("rebase-merge").is_dir() {
Some(state::InProgress::Rebase)
} else if git_dir.join("CHERRY_PICK_HEAD").is_file() {
if git_dir.join("sequencer/todo").is_file() {
Some(state::InProgress::CherryPickSequence)
} else {
Some(state::InProgress::CherryPick)
}
} else if git_dir.join("MERGE_HEAD").is_file() {
Some(state::InProgress::Merge)
} else if git_dir.join("BISECT_LOG").is_file() {
Some(state::InProgress::Bisect)
} else if git_dir.join("REVERT_HEAD").is_file() {
if git_dir.join("sequencer/todo").is_file() {
Some(state::InProgress::RevertSequence)
} else {
Some(state::InProgress::Revert)
}
} else {
None
}
}
sourcepub fn work_dir(&self) -> Option<&Path>
pub fn work_dir(&self) -> Option<&Path>
Return the work tree containing all checked out files, if there is one.
Examples found in repository?
fn worktree_branches(repo: &Repository) -> Result<BTreeMap<git_ref::FullName, PathBuf>, update::Error> {
let mut map = BTreeMap::new();
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
for proxy in repo.worktrees()? {
let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
}
Ok(map)
}
fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> {
self.unset_disambiguate_call();
match self.repo.index() {
Ok(index) => match index.entry_by_path_and_stage(path, stage.into()) {
Some(entry) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(entry.id);
Some(())
}
None => {
let stage_hint = [0, 1, 2]
.iter()
.filter(|our_stage| **our_stage != stage)
.find_map(|stage| {
index
.entry_index_by_path_and_stage(path, (*stage).into())
.map(|_| (*stage).into())
});
let exists = self
.repo
.work_dir()
.map_or(false, |root| root.join(git_path::from_bstr(path)).exists());
self.err.push(Error::IndexLookup {
desired_path: path.into(),
desired_stage: stage.into(),
exists,
stage_hint,
});
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
}
}
sourcepub fn install_dir(&self) -> Result<PathBuf>
pub fn install_dir(&self) -> Result<PathBuf>
The directory of the binary path of the current process.
Examples found in repository?
pub fn open_mailmap_into(&self, target: &mut git_mailmap::Snapshot) -> Result<(), crate::mailmap::load::Error> {
let mut err = None::<crate::mailmap::load::Error>;
let mut buf = Vec::new();
let mut blob_id = self
.config
.resolved
.raw_value("mailmap", None, "blob")
.ok()
.and_then(|spec| {
// TODO: actually resolve this as spec (once we can do that)
git_hash::ObjectId::from_hex(spec.as_ref())
.map_err(|e| err.get_or_insert(e.into()))
.ok()
});
match self.work_dir() {
None => {
// TODO: replace with ref-spec `HEAD:.mailmap` for less verbose way of getting the blob id
blob_id = blob_id.or_else(|| {
self.head().ok().and_then(|mut head| {
let commit = head.peel_to_commit_in_place().ok()?;
let tree = commit.tree().ok()?;
tree.lookup_entry(Some(".mailmap")).ok()?.map(|e| e.object_id())
})
});
}
Some(root) => {
if let Ok(mut file) = git_features::fs::open_options_no_follow()
.read(true)
.open(root.join(".mailmap"))
.map_err(|e| {
if e.kind() != std::io::ErrorKind::NotFound {
err.get_or_insert(e.into());
}
})
{
buf.clear();
std::io::copy(&mut file, &mut buf)
.map_err(|e| err.get_or_insert(e.into()))
.ok();
target.merge(git_mailmap::parse_ignore_errors(&buf));
}
}
}
if let Some(blob) = blob_id.and_then(|id| self.find_object(id).map_err(|e| err.get_or_insert(e.into())).ok()) {
target.merge(git_mailmap::parse_ignore_errors(&blob.data));
}
let configured_path = self
.config
.resolved
.value::<git_config::Path<'_>>("mailmap", None, "file")
.ok()
.and_then(|path| {
let install_dir = self.install_dir().ok()?;
let home = self.config.home_dir();
match path.interpolate(git_config::path::interpolate::Context {
git_install_dir: Some(install_dir.as_path()),
home_dir: home.as_deref(),
home_for_user: if self.options.git_dir_trust.expect("trust is set") == git_sec::Trust::Full {
Some(git_config::path::interpolate::home_for_user)
} else {
None
},
}) {
Ok(path) => Some(path),
Err(e) => {
err.get_or_insert(e.into());
None
}
}
});
if let Some(mut file) =
configured_path.and_then(|path| std::fs::File::open(path).map_err(|e| err.get_or_insert(e.into())).ok())
{
buf.clear();
std::io::copy(&mut file, &mut buf)
.map_err(|e| err.get_or_insert(e.into()))
.ok();
target.merge(git_mailmap::parse_ignore_errors(&buf));
}
err.map(Err).unwrap_or(Ok(()))
}
sourcepub fn prefix(&self) -> Option<Result<PathBuf>>
pub fn prefix(&self) -> Option<Result<PathBuf>>
Returns the relative path made up of the components between the working tree root and the current working directory (CWD).
Note that the result may be None
if there is no work tree, while the returned PathBuf
will be empty
if the CWD is at the root of the work tree.
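A minimal sketch of using this, assuming repo is an already-opened Repository and the enclosing function returns a boxed error (both assumptions, not taken from the crate’s own examples):
if let Some(prefix) = repo.prefix() {
    let prefix = prefix?; // fails only if the current directory cannot be determined
    println!("running {} below the work-tree root", prefix.display());
}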
source§impl Repository
impl Repository
Methods related to object creation.
sourcepub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, Error>
pub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, Error>
Find the object with id
in the object database or return an error if it could not be found.
There are various legitimate reasons for an object to not be present, which is why
try_find_object(…)
might be preferable instead.
Performance Note
In order to get the kind of the object, it must be fully decoded from storage if it is packed with deltas. Loose objects could be partially decoded, even though that’s not implemented.
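A sketch of both lookup flavors, assuming repo is an open Repository, id is a git_hash::ObjectId known to the caller, and the enclosing function returns a boxed error:
let object = repo.find_object(id)?; // errors if the object is absent
println!("{:?} with {} bytes", object.kind, object.data.len());

// Prefer try_find_object(…) when absence is a legitimate outcome.
if repo.try_find_object(id)?.is_none() {
    eprintln!("{} is not present in the object database", id);
}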
Examples found in repository?
fn require_object_kind(repo: &Repository, obj: &git_hash::oid, kind: git_object::Kind) -> Result<(), Error> {
let obj = repo.find_object(obj)?;
if obj.kind == kind {
Ok(())
} else {
Err(Error::ObjectKind {
actual: obj.kind,
expected: kind,
oid: obj.id.attach(repo).shorten_or_id(),
})
}
}
if self.last_call_was_disambiguate_prefix[self.idx] {
self.unset_disambiguate_call();
if let Some(objs) = self.objs[self.idx].as_mut() {
let repo = self.repo;
let errors: Vec<_> = match hint {
Some(kind_hint) => match kind_hint {
ObjectKindHint::Treeish | ObjectKindHint::Committish => {
let kind = match kind_hint {
ObjectKindHint::Treeish => git_object::Kind::Tree,
ObjectKindHint::Committish => git_object::Kind::Commit,
_ => unreachable!("BUG: we narrow possibilities above"),
};
objs.iter()
.filter_map(|obj| peel(repo, obj, kind).err().map(|err| (*obj, err)))
.collect()
}
ObjectKindHint::Tree | ObjectKindHint::Commit | ObjectKindHint::Blob => {
let kind = match kind_hint {
ObjectKindHint::Tree => git_object::Kind::Tree,
ObjectKindHint::Commit => git_object::Kind::Commit,
ObjectKindHint::Blob => git_object::Kind::Blob,
_ => unreachable!("BUG: we narrow possibilities above"),
};
objs.iter()
.filter_map(|obj| require_object_kind(repo, obj, kind).err().map(|err| (*obj, err)))
.collect()
}
},
None => return,
};
if errors.len() == objs.len() {
self.err.extend(errors.into_iter().map(|(_, err)| err));
} else {
for (obj, err) in errors {
objs.remove(&obj);
self.err.push(err);
}
}
}
}
}
fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> {
assert_eq!(self.refs.len(), self.objs.len());
let repo = self.repo;
for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) {
if let (_ref_opt @ Some(ref_), obj_opt @ None) = (r, obj) {
if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| {
ref_.clone()
.attach(repo)
.peel_to_id_in_place()
.ok()
.map(|id| id.detach())
}) {
obj_opt.get_or_insert_with(HashSet::default).insert(id);
};
};
}
Some(())
}
fn unset_disambiguate_call(&mut self) {
self.last_call_was_disambiguate_prefix[self.idx] = false;
}
}
fn peel(repo: &Repository, obj: &git_hash::oid, kind: git_object::Kind) -> Result<ObjectId, Error> {
let mut obj = repo.find_object(obj)?;
obj = obj.peel_to_kind(kind)?;
debug_assert_eq!(obj.kind, kind, "bug in Object::peel_to_kind() which didn't deliver");
Ok(obj.id)
}
pub fn lookup_entry<I, P>(mut self, path: I) -> Result<Option<Entry<'repo>>, find::existing::Error>
where
I: IntoIterator<Item = P>,
P: PartialEq<BStr>,
{
let mut path = path.into_iter().peekable();
while let Some(component) = path.next() {
match TreeRefIter::from_bytes(&self.data)
.filter_map(Result::ok)
.find(|entry| component.eq(entry.filename))
{
Some(entry) => {
if path.peek().is_none() {
return Ok(Some(Entry {
inner: entry.into(),
repo: self.repo,
}));
} else {
let next_id = entry.oid.to_owned();
let repo = self.repo;
drop(self);
self = match repo.find_object(next_id)?.try_into_tree() {
Ok(tree) => tree,
Err(_) => return Ok(None),
};
}
}
None => return Ok(None),
}
}
Ok(None)
}
pub fn peel_to_kind(mut self, kind: Kind) -> Result<Self, peel::to_kind::Error> {
loop {
match self.kind {
our_kind if kind == our_kind => {
return Ok(self);
}
Kind::Commit => {
let tree_id = self
.try_to_commit_ref_iter()
.expect("commit")
.tree_id()
.expect("valid commit");
let repo = self.repo;
drop(self);
self = repo.find_object(tree_id)?;
}
Kind::Tag => {
let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
let repo = self.repo;
drop(self);
self = repo.find_object(target_id)?;
}
Kind::Tree | Kind::Blob => {
return Err(peel::to_kind::Error::NotFound {
oid: self.id().shorten().unwrap_or_else(|_| self.id.into()),
actual: self.kind,
expected: kind,
})
}
}
}
}
/// Peel this object into a tree and return it, if this is possible.
pub fn peel_to_tree(self) -> Result<Tree<'repo>, peel::to_kind::Error> {
Ok(self.peel_to_kind(git_object::Kind::Tree)?.into_tree())
}
// TODO: tests
/// Follow all tag object targets until a commit, tree or blob is reached.
///
/// Note that this method is different from [`peel_to_kind(…)`][Object::peel_to_kind()] as it won't
/// peel commits to their tree, but handles tags only.
pub fn peel_tags_to_end(mut self) -> Result<Self, object::find::existing::Error> {
loop {
match self.kind {
Kind::Commit | Kind::Tree | Kind::Blob => break Ok(self),
Kind::Tag => {
let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
let repo = self.repo;
drop(self);
self = repo.find_object(target_id)?;
}
}
}
}
pub(crate) fn ambiguous(candidates: HashSet<ObjectId>, prefix: git_hash::Prefix, repo: &Repository) -> Self {
#[derive(PartialOrd, Ord, Eq, PartialEq, Copy, Clone)]
enum Order {
Tag,
Commit,
Tree,
Blob,
Invalid,
}
let candidates = {
let mut c: Vec<_> = candidates
.into_iter()
.map(|oid| {
let obj = repo.find_object(oid);
let order = match &obj {
Err(_) => Order::Invalid,
Ok(obj) => match obj.kind {
git_object::Kind::Tag => Order::Tag,
git_object::Kind::Commit => Order::Commit,
git_object::Kind::Tree => Order::Tree,
git_object::Kind::Blob => Order::Blob,
},
};
(oid, obj, order)
})
.collect();
c.sort_by(|lhs, rhs| lhs.2.cmp(&rhs.2).then_with(|| lhs.0.cmp(&rhs.0)));
c
};
Error::AmbiguousPrefix {
prefix,
info: candidates
.into_iter()
.map(|(oid, find_result, _)| {
let info = match find_result {
Ok(obj) => match obj.kind {
git_object::Kind::Tree | git_object::Kind::Blob => CandidateInfo::Object { kind: obj.kind },
git_object::Kind::Tag => {
let tag = obj.to_tag_ref();
CandidateInfo::Tag { name: tag.name.into() }
}
git_object::Kind::Commit => {
use bstr::ByteSlice;
let commit = obj.to_commit_ref();
CandidateInfo::Commit {
date: commit.committer().time,
title: commit.message().title.trim().into(),
}
}
},
Err(err) => CandidateInfo::FindError { source: err },
};
(oid.attach(repo).shorten().unwrap_or_else(|_| oid.into()), info)
})
.collect(),
}
}
sourcepub fn try_find_object(
&self,
id: impl Into<ObjectId>
) -> Result<Option<Object<'_>>, Error>
pub fn try_find_object(
&self,
id: impl Into<ObjectId>
) -> Result<Option<Object<'_>>, Error>
Try to find the object with id
or return None
if it wasn’t found.
sourcepub fn write_object(&self, object: impl WriteTo) -> Result<Id<'_>, Error>
pub fn write_object(&self, object: impl WriteTo) -> Result<Id<'_>, Error>
Write the given object into the object database and return its object id.
Examples found in repository?
pub fn tag(
&self,
name: impl AsRef<str>,
target: impl AsRef<git_hash::oid>,
target_kind: git_object::Kind,
tagger: Option<git_actor::SignatureRef<'_>>,
message: impl AsRef<str>,
constraint: PreviousValue,
) -> Result<Reference<'_>, tag::Error> {
let tag = git_object::Tag {
target: target.as_ref().into(),
target_kind,
name: name.as_ref().into(),
tagger: tagger.map(|t| t.to_owned()),
message: message.as_ref().into(),
pgp_signature: None,
};
let tag_id = self.write_object(&tag)?;
self.tag_reference(name, tag_id, constraint).map_err(Into::into)
}
/// Similar to [`commit(…)`][crate::Repository::commit()], but allows to create the commit with `committer` and `author` specified.
///
/// This forces setting the commit time and author time by hand. Note that typically, committer and author are the same.
pub fn commit_as<'a, 'c, Name, E>(
&self,
committer: impl Into<git_actor::SignatureRef<'c>>,
author: impl Into<git_actor::SignatureRef<'a>>,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
use git_ref::{
transaction::{Change, RefEdit},
Target,
};
// TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway.
// This can be made vastly more efficient though if we wanted to, so we lie in the API
let reference = reference.try_into()?;
let commit = git_object::Commit {
message: message.as_ref().into(),
tree: tree.into(),
author: author.into().to_owned(),
committer: committer.into().to_owned(),
encoding: None,
parents: parents.into_iter().map(|id| id.into()).collect(),
extra_headers: Default::default(),
};
let commit_id = self.write_object(&commit)?;
self.edit_reference(RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()),
},
expected: match commit.parents.first().map(|p| Target::Peeled(*p)) {
Some(previous) => {
if reference.as_bstr() == "HEAD" {
PreviousValue::MustExistAndMatch(previous)
} else {
PreviousValue::ExistingMustMatch(previous)
}
}
None => PreviousValue::MustNotExist,
},
new: Target::Peeled(commit_id.inner),
},
name: reference,
deref: true,
})?;
Ok(commit_id)
}
sourcepub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, Error>
pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, Error>
Write a blob from the given bytes
.
sourcepub fn write_blob_stream(&self, bytes: impl Read + Seek) -> Result<Id<'_>, Error>
pub fn write_blob_stream(&self, bytes: impl Read + Seek) -> Result<Id<'_>, Error>
Write a blob from the given Read
implementation.
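A sketch of both blob-writing variants, assuming repo is an open Repository with a writable object database; the contents are placeholders:
let _id = repo.write_blob(b"hello world")?; // returns the id of the stored blob
// The streaming variant accepts any Read + Seek source, such as an in-memory cursor or a file.
let _id = repo.write_blob_stream(std::io::Cursor::new(b"streamed content".to_vec()))?;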
sourcepub fn tag(
&self,
name: impl AsRef<str>,
target: impl AsRef<oid>,
target_kind: Kind,
tagger: Option<SignatureRef<'_>>,
message: impl AsRef<str>,
constraint: PreviousValue
) -> Result<Reference<'_>, Error>
pub fn tag(
&self,
name: impl AsRef<str>,
target: impl AsRef<oid>,
target_kind: Kind,
tagger: Option<SignatureRef<'_>>,
message: impl AsRef<str>,
constraint: PreviousValue
) -> Result<Reference<'_>, Error>
Create a tag reference named name
(without refs/tags/
prefix) pointing to a newly created tag object
which in turn points to target
and return the newly created reference.
It will be created with constraint
which is most commonly to only create it
or to force overwriting a possibly existing tag.
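A sketch, assuming repo is open, commit_id is the id of an existing commit (a hypothetical name), and PreviousValue from git_ref::transaction is in scope as in the examples shown elsewhere on this page:
let _tag_ref = repo.tag(
    "v1.0.0",                          // created as refs/tags/v1.0.0
    &commit_id,
    git_object::Kind::Commit,
    Some(repo.committer_or_default()), // the tagger, taken from configuration here
    "first release",
    PreviousValue::MustNotExist,       // only create, never overwrite
)?;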
sourcepub fn commit_as<'a, 'c, Name, E>(
&self,
committer: impl Into<SignatureRef<'c>>,
author: impl Into<SignatureRef<'a>>,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Result<Id<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
pub fn commit_as<'a, 'c, Name, E>(
&self,
committer: impl Into<SignatureRef<'c>>,
author: impl Into<SignatureRef<'a>>,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Result<Id<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
Similar to commit(…)
, but allows creating the commit with committer
and author
specified.
This forces setting the commit time and author time by hand. Note that typically, committer and author are the same.
Examples found in repository?
pub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
let author = self.author_or_default();
let committer = self.committer_or_default();
self.commit_as(committer, author, reference, message, tree, parents)
}
sourcepub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Result<Id<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
pub fn commit<Name, E>(
&self,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Result<Id<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
Create a new commit object with message
referring to tree
with parents
, and point reference
to it. The commit is written without a message encoding field, which means the message can be assumed to be UTF-8.
author
and committer
fields are pre-set from the configuration, which can be altered
temporarily before the call if required.
reference
will be created if it doesn’t exist, and can be "HEAD"
to automatically write-through to the symbolic reference
that HEAD
points to if it is not detached. For this reason, detached head states cannot be created unless the HEAD
is detached
already. The reflog will be written as canonical git would do, like <operation> (<detail>): <summary>
.
The first parent id in parents
is expected to be the current target of reference
and the operation will fail if it is not.
If there is no parent, the reference
is expected to not exist yet.
The method fails immediately if a reference
lock can’t be acquired.
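A minimal sketch, assuming repo is open, tree_id is the id of an existing tree, and parent_id is the current tip of the branch HEAD points to (all three names are assumptions for illustration):
let _commit_id = repo.commit(
    "HEAD",                     // writes through to the branch HEAD points to
    "describe the change here",
    tree_id,
    Some(parent_id),            // pass an empty iterator instead for a root commit
)?;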
sourcepub fn empty_tree(&self) -> Tree<'_>
pub fn empty_tree(&self) -> Tree<'_>
Return an empty tree object, suitable for getting changes.
Note that it is special and doesn’t physically exist in the object database even though it can be returned. This means that this object can be used in an uninitialized, empty repository which would report having no objects at all.
source§impl Repository
impl Repository
Obtain and alter references comfortably
sourcepub fn tag_reference(
&self,
name: impl AsRef<str>,
target: impl Into<ObjectId>,
constraint: PreviousValue
) -> Result<Reference<'_>, Error>
pub fn tag_reference(
&self,
name: impl AsRef<str>,
target: impl Into<ObjectId>,
constraint: PreviousValue
) -> Result<Reference<'_>, Error>
Create a lightweight tag with given name
(and without refs/tags/
prefix) pointing to the given target
, and return it as reference.
It will be created with constraint
which is most commonly to only create it
or to force overwriting a possibly existing tag.
Examples found in repository?
pub fn tag(
&self,
name: impl AsRef<str>,
target: impl AsRef<git_hash::oid>,
target_kind: git_object::Kind,
tagger: Option<git_actor::SignatureRef<'_>>,
message: impl AsRef<str>,
constraint: PreviousValue,
) -> Result<Reference<'_>, tag::Error> {
let tag = git_object::Tag {
target: target.as_ref().into(),
target_kind,
name: name.as_ref().into(),
tagger: tagger.map(|t| t.to_owned()),
message: message.as_ref().into(),
pgp_signature: None,
};
let tag_id = self.write_object(&tag)?;
self.tag_reference(name, tag_id, constraint).map_err(Into::into)
}
sourcepub fn namespace(&self) -> Option<&Namespace>
pub fn namespace(&self) -> Option<&Namespace>
Returns the currently set namespace for references, or None
if it is not set.
Namespaces allow partitioning references, and are configured per Easy
.
sourcepub fn clear_namespace(&mut self) -> Option<Namespace>
pub fn clear_namespace(&mut self) -> Option<Namespace>
Remove the currently set reference namespace and return it, affecting only this Easy
.
sourcepub fn set_namespace<'a, Name, E>(
&mut self,
namespace: Name
) -> Result<Option<Namespace>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
pub fn set_namespace<'a, Name, E>(
&mut self,
namespace: Name
) -> Result<Option<Namespace>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
Set the reference namespace to the given value, like "foo"
or "foo/bar"
.
Note that this value is shared across all Easy…
instances as the value is stored in the shared Repository
.
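A sketch, assuming repo is a mutable Repository handle and the enclosing function returns a compatible error:
repo.set_namespace("foo")?;             // references are now looked up under the namespace prefix
assert!(repo.namespace().is_some());
let _previous = repo.clear_namespace(); // back to the unnamespaced view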
sourcepub fn reference<Name, E>(
&self,
name: Name,
target: impl Into<ObjectId>,
constraint: PreviousValue,
log_message: impl Into<BString>
) -> Result<Reference<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
pub fn reference<Name, E>(
&self,
name: Name,
target: impl Into<ObjectId>,
constraint: PreviousValue,
log_message: impl Into<BString>
) -> Result<Reference<'_>, Error>where
Name: TryInto<FullName, Error = E>,
Error: From<E>,
Create a new reference with name
, like refs/heads/branch
, pointing to target
, adhering to constraint
during creation and writing log_message
into the reflog. Note that a ref-log will be written even if log_message
is empty.
The newly created Reference is returned.
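A sketch, assuming repo is open, commit_id is an existing commit id (a hypothetical name), and PreviousValue is in scope:
let _branch = repo.reference(
    "refs/heads/feature",
    commit_id,
    PreviousValue::MustNotExist,     // only create, never move an existing branch
    "branch: created for an example", // reflog message
)?;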
Examples found in repository?
pub fn set_target_id(
&mut self,
id: impl Into<git_hash::ObjectId>,
reflog_message: impl Into<BString>,
) -> Result<(), Error> {
match &self.inner.target {
Target::Symbolic(name) => return Err(Error::SymbolicReference { name: name.clone() }),
Target::Peeled(current_id) => {
let changed = self.repo.reference(
self.name(),
id,
PreviousValue::MustExistAndMatch(Target::Peeled(current_id.to_owned())),
reflog_message,
)?;
*self = changed;
}
}
Ok(())
}
sourcepub fn edit_reference(&self, edit: RefEdit) -> Result<Vec<RefEdit>, Error>
pub fn edit_reference(&self, edit: RefEdit) -> Result<Vec<RefEdit>, Error>
Edit a single reference as described in edit
, and write reference logs as log_committer
.
One or more RefEdit
s are returned - symbolic reference splits can cause more edits to be performed. All edits have the previous
reference values set to the ones encountered at rest after acquiring the respective reference’s lock.
Examples found in repository?
pub fn delete(&self) -> Result<(), crate::reference::edit::Error> {
self.repo
.edit_reference(RefEdit {
change: Change::Delete {
expected: PreviousValue::MustExistAndMatch(self.inner.target.clone()),
log: RefLog::AndReference,
},
name: self.inner.name.clone(),
deref: false,
})
.map(|_| ())
}
More examples
pub fn tag_reference(
&self,
name: impl AsRef<str>,
target: impl Into<ObjectId>,
constraint: PreviousValue,
) -> Result<Reference<'_>, reference::edit::Error> {
let id = target.into();
let mut edits = self.edit_reference(RefEdit {
change: Change::Update {
log: Default::default(),
expected: constraint,
new: Target::Peeled(id),
},
name: format!("refs/tags/{}", name.as_ref()).try_into()?,
deref: false,
})?;
assert_eq!(edits.len(), 1, "reference splits should never happen");
let edit = edits.pop().expect("exactly one item");
Ok(Reference {
inner: git_ref::Reference {
name: edit.name,
target: id.into(),
peeled: None,
},
repo: self,
})
}
/// Returns the currently set namespace for references, or `None` if it is not set.
///
/// Namespaces allow to partition references, and is configured per `Easy`.
pub fn namespace(&self) -> Option<&git_ref::Namespace> {
self.refs.namespace.as_ref()
}
/// Remove the currently set reference namespace and return it, affecting only this `Easy`.
pub fn clear_namespace(&mut self) -> Option<git_ref::Namespace> {
self.refs.namespace.take()
}
/// Set the reference namespace to the given value, like `"foo"` or `"foo/bar"`.
///
/// Note that this value is shared across all `Easy…` instances as the value is stored in the shared `Repository`.
pub fn set_namespace<'a, Name, E>(
&mut self,
namespace: Name,
) -> Result<Option<git_ref::Namespace>, git_validate::refname::Error>
where
Name: TryInto<&'a PartialNameRef, Error = E>,
git_validate::refname::Error: From<E>,
{
let namespace = git_ref::namespace::expand(namespace)?;
Ok(self.refs.namespace.replace(namespace))
}
// TODO: more tests or usage
/// Create a new reference with `name`, like `refs/heads/branch`, pointing to `target`, adhering to `constraint`
/// during creation and writing `log_message` into the reflog. Note that a ref-log will be written even if `log_message` is empty.
///
/// The newly created Reference is returned.
pub fn reference<Name, E>(
&self,
name: Name,
target: impl Into<ObjectId>,
constraint: PreviousValue,
log_message: impl Into<BString>,
) -> Result<Reference<'_>, reference::edit::Error>
where
Name: TryInto<FullName, Error = E>,
git_validate::reference::name::Error: From<E>,
{
let name = name.try_into().map_err(git_validate::reference::name::Error::from)?;
let id = target.into();
let mut edits = self.edit_reference(RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: log_message.into(),
},
expected: constraint,
new: Target::Peeled(id),
},
name,
deref: false,
})?;
assert_eq!(
edits.len(),
1,
"only one reference can be created, splits aren't possible"
);
Ok(git_ref::Reference {
name: edits.pop().expect("exactly one edit").name,
target: Target::Peeled(id),
peeled: None,
}
.attach(self))
}
pub fn init_opts(
directory: impl AsRef<Path>,
kind: crate::create::Kind,
create_options: crate::create::Options,
mut open_options: crate::open::Options,
) -> Result<Self, Error> {
let path = crate::create::into(directory.as_ref(), kind, create_options)?;
let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
open_options.git_dir_trust = Some(git_sec::Trust::Full);
open_options.current_dir = std::env::current_dir()?.into();
let repo = ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options)?;
let branch_name = repo
.config
.resolved
.string("init", None, "defaultBranch")
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_BRANCH_NAME.into()));
if branch_name.as_ref() != DEFAULT_BRANCH_NAME {
let sym_ref: FullName =
format!("refs/heads/{branch_name}")
.try_into()
.map_err(|err| Error::InvalidBranchName {
name: branch_name.into_owned(),
source: err,
})?;
let mut repo = repo.to_thread_local();
let prev_write_reflog = repo.refs.write_reflog;
repo.refs.write_reflog = WriteReflog::Disable;
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log: Default::default(),
expected: PreviousValue::Any,
new: Target::Symbolic(sym_ref),
},
name: "HEAD".try_into().expect("valid"),
deref: false,
})?;
repo.refs.write_reflog = prev_write_reflog;
}
Ok(repo)
}
pub fn commit_as<'a, 'c, Name, E>(
&self,
committer: impl Into<git_actor::SignatureRef<'c>>,
author: impl Into<git_actor::SignatureRef<'a>>,
reference: Name,
message: impl AsRef<str>,
tree: impl Into<ObjectId>,
parents: impl IntoIterator<Item = impl Into<ObjectId>>,
) -> Result<Id<'_>, commit::Error>
where
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
use git_ref::{
transaction::{Change, RefEdit},
Target,
};
// TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway.
// This can be made vastly more efficient though if we wanted to, so we lie in the API
let reference = reference.try_into()?;
let commit = git_object::Commit {
message: message.as_ref().into(),
tree: tree.into(),
author: author.into().to_owned(),
committer: committer.into().to_owned(),
encoding: None,
parents: parents.into_iter().map(|id| id.into()).collect(),
extra_headers: Default::default(),
};
let commit_id = self.write_object(&commit)?;
self.edit_reference(RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()),
},
expected: match commit.parents.first().map(|p| Target::Peeled(*p)) {
Some(previous) => {
if reference.as_bstr() == "HEAD" {
PreviousValue::MustExistAndMatch(previous)
} else {
PreviousValue::ExistingMustMatch(previous)
}
}
None => PreviousValue::MustNotExist,
},
new: Target::Peeled(commit_id.inner),
},
name: reference,
deref: true,
})?;
Ok(commit_id)
}
pub fn update_head(
repo: &mut Repository,
remote_refs: &[git_protocol::handshake::Ref],
reflog_message: &BStr,
remote_name: &BStr,
) -> Result<(), Error> {
use git_ref::{
transaction::{PreviousValue, RefEdit},
Target,
};
let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| {
Some(match r {
git_protocol::handshake::Ref::Symbolic {
full_ref_name,
target,
object,
} if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
git_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
(Some(object.as_ref()), None)
}
git_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => {
(None, Some(target))
}
_ => return None,
})
}) {
Some(t) => t,
None => return Ok(()),
};
let head: git_ref::FullName = "HEAD".try_into().expect("valid");
let reflog_message = || LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: reflog_message.to_owned(),
};
match head_ref {
Some(referent) => {
let referent: git_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef {
head_ref_name: referent.to_owned(),
source: err,
})?;
repo.refs
.transaction()
.packed_refs(git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}),
))
.prepare(
{
let mut edits = vec![RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Symbolic(referent.clone()),
},
name: head.clone(),
deref: false,
}];
if let Some(head_peeled_id) = head_peeled_id {
edits.push(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: referent.clone(),
deref: false,
});
};
edits
},
git_lock::acquire::Fail::Immediately,
git_lock::acquire::Fail::Immediately,
)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?;
if let Some(head_peeled_id) = head_peeled_id {
let mut log = reflog_message();
log.mode = RefLog::Only;
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log,
expected: PreviousValue::Any,
new: Target::Peeled(head_peeled_id.to_owned()),
},
name: head,
deref: false,
})?;
}
setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?;
}
None => {
repo.edit_reference(RefEdit {
change: git_ref::transaction::Change::Update {
log: reflog_message(),
expected: PreviousValue::Any,
new: Target::Peeled(
head_peeled_id
.expect("detached heads always point to something")
.to_owned(),
),
},
name: head,
deref: false,
})?;
}
};
Ok(())
}
sourcepub fn edit_references(
&self,
edits: impl IntoIterator<Item = RefEdit>
) -> Result<Vec<RefEdit>, Error>
pub fn edit_references(
&self,
edits: impl IntoIterator<Item = RefEdit>
) -> Result<Vec<RefEdit>, Error>
Edit one or more references as described by their edits
.
Note that one can set the committer name for use in the ref-log by temporarily
overriding the git-config.
Returns all reference edits, which might be more than were provided due to the splitting of symbolic references, and whose previous (old) values are the ones seen in storage after the reference was locked.
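A sketch of a two-edit update, assuming repo is open, id_a and id_b are existing object ids (hypothetical names), and the transaction types from git_ref are in scope as in the examples shown on this page:
let make_edit = |name: &str, id| RefEdit {
    change: Change::Update {
        log: LogChange {
            mode: RefLog::AndReference,
            force_create_reflog: false,
            message: "example: batch update".into(),
        },
        expected: PreviousValue::Any,
        new: Target::Peeled(id),
    },
    name: name.try_into().expect("statically known to be a valid full name"),
    deref: false,
};
let edits = repo.edit_references([make_edit("refs/heads/a", id_a), make_edit("refs/heads/b", id_b)])?;
assert_eq!(edits.len(), 2, "no symbolic references involved, so no splits occurred");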
sourcepub fn head(&self) -> Result<Head<'_>, Error>
pub fn head(&self) -> Result<Head<'_>, Error>
Return the repository head, an abstraction to help dealing with the HEAD
reference.
The HEAD
reference can be in various states; for more information, see the documentation of Head
.
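A sketch, assuming repo is an open Repository:
let head = repo.head()?;
match head.referent_name() {
    Some(name) => println!("on branch {}", name.as_bstr()),
    None => println!("HEAD is detached"),
}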
Examples found in repository?
pub fn head_id(&self) -> Result<crate::Id<'_>, reference::head_id::Error> {
let mut head = self.head()?;
head.peel_to_id_in_place()
.ok_or_else(|| reference::head_id::Error::Unborn {
name: head.referent_name().expect("unborn").to_owned(),
})?
.map_err(Into::into)
}
/// Return the name to the symbolic reference `HEAD` points to, or `None` if the head is detached.
///
/// The difference to [`head_ref()`][Self::head_ref()] is that the latter requires the reference to exist,
/// whereas here we merely return the name of the possibly unborn reference.
pub fn head_name(&self) -> Result<Option<FullName>, reference::find::existing::Error> {
Ok(self.head()?.referent_name().map(|n| n.to_owned()))
}
/// Return the reference that `HEAD` points to, or `None` if the head is detached or unborn.
pub fn head_ref(&self) -> Result<Option<Reference<'_>>, reference::find::existing::Error> {
Ok(self.head()?.try_into_referent())
}
/// Return the commit object the `HEAD` reference currently points to after peeling it fully.
///
/// Note that this may fail for various reasons, most notably because the repository
/// is freshly initialized and doesn't have any commits yet. It could also fail if the
/// head does not point to a commit.
pub fn head_commit(&self) -> Result<crate::Commit<'_>, reference::head_commit::Error> {
Ok(self.head()?.peel_to_commit_in_place()?)
}
More examples
fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
self.unset_disambiguate_call();
match query {
ReflogLookup::Date(_date) => {
self.err.push(Error::Planned {
dependency: "remote handling and ref-specs are fleshed out more",
});
None
}
ReflogLookup::Entry(no) => {
let r = match &mut self.refs[self.idx] {
Some(r) => r.clone().attach(self.repo),
val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
Ok(Some(r)) => {
*val = Some(r.clone().detach());
r
}
Ok(None) => {
self.err.push(Error::UnbornHeadsHaveNoRefLog);
return None;
}
Err(err) => {
self.err.push(err.into());
return None;
}
},
};
let mut platform = r.log_iter();
match platform.rev().ok().flatten() {
Some(mut it) => match it.nth(no).and_then(Result::ok) {
Some(line) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(line.new_oid);
Some(())
}
None => {
let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
self.err.push(Error::RefLogEntryOutOfRange {
reference: r.detach(),
desired: no,
available,
});
None
}
},
None => {
self.err.push(Error::MissingRefLog {
reference: r.name().as_bstr().into(),
action: "lookup entry",
});
None
}
}
}
}
}
fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
self.unset_disambiguate_call();
fn prior_checkouts_iter<'a>(
platform: &'a mut git_ref::file::log::iter::Platform<'static, '_>,
) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
match platform.rev().ok().flatten() {
Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
line.message
.strip_prefix(b"checkout: moving from ")
.and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
.map(|from_branch| (from_branch.into(), line.previous_oid))
})),
None => Err(Error::MissingRefLog {
reference: "HEAD".into(),
action: "search prior checked out branch",
}),
}
}
let head = match self.repo.head() {
Ok(head) => head,
Err(err) => {
self.err.push(err.into());
return None;
}
};
match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
Ok(Some((ref_name, id))) => {
let id = match self.repo.find_reference(ref_name.as_bstr()) {
Ok(mut r) => {
let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
self.refs[self.idx] = Some(r.detach());
id
}
Err(_) => id,
};
self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
Some(())
}
Ok(None) => {
self.err.push(Error::PriorCheckoutOutOfRange {
desired: branch_no,
available: prior_checkouts_iter(&mut head.log_iter())
.map(|it| it.count())
.unwrap_or(0),
});
None
}
Err(err) => {
self.err.push(err);
None
}
}
}
pub fn main_worktree(
&mut self,
mut progress: impl crate::Progress,
should_interrupt: &AtomicBool,
) -> Result<(Repository, git_worktree::index::checkout::Outcome), Error> {
let repo = self
.repo
.as_ref()
.expect("still present as we never succeeded the worktree checkout yet");
let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository {
git_dir: repo.git_dir().to_owned(),
})?;
let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? {
Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id,
None => {
return Ok((
self.repo.take().expect("still present"),
git_worktree::index::checkout::Outcome::default(),
))
}
};
let index = git_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok())
.map_err(|err| Error::IndexFromTree {
id: root_tree,
source: err,
})?;
let mut index = git_index::File::from_state(index, repo.index_path());
let mut opts = repo.config.checkout_options(repo.git_dir())?;
opts.destination_is_initially_empty = true;
let mut files = progress.add_child_with_id("checkout", *b"CLCF"); /* CLone Checkout Files */
let mut bytes = progress.add_child_with_id("writing", *b"CLCB") /* CLone Checkout Bytes */;
files.init(Some(index.entries().len()), crate::progress::count("files"));
bytes.init(None, crate::progress::bytes());
let start = std::time::Instant::now();
let outcome = git_worktree::index::checkout(
&mut index,
workdir,
{
let objects = repo.objects.clone().into_arc()?;
move |oid, buf| objects.find_blob(oid, buf)
},
&mut files,
&mut bytes,
should_interrupt,
opts,
)?;
files.show_throughput(start);
bytes.show_throughput(start);
index.write(Default::default())?;
Ok((self.repo.take().expect("still present"), outcome))
}
pub fn open_mailmap_into(&self, target: &mut git_mailmap::Snapshot) -> Result<(), crate::mailmap::load::Error> {
let mut err = None::<crate::mailmap::load::Error>;
let mut buf = Vec::new();
let mut blob_id = self
.config
.resolved
.raw_value("mailmap", None, "blob")
.ok()
.and_then(|spec| {
// TODO: actually resolve this as spec (once we can do that)
git_hash::ObjectId::from_hex(spec.as_ref())
.map_err(|e| err.get_or_insert(e.into()))
.ok()
});
match self.work_dir() {
None => {
// TODO: replace with ref-spec `HEAD:.mailmap` for less verbose way of getting the blob id
blob_id = blob_id.or_else(|| {
self.head().ok().and_then(|mut head| {
let commit = head.peel_to_commit_in_place().ok()?;
let tree = commit.tree().ok()?;
tree.lookup_entry(Some(".mailmap")).ok()?.map(|e| e.object_id())
})
});
}
Some(root) => {
if let Ok(mut file) = git_features::fs::open_options_no_follow()
.read(true)
.open(root.join(".mailmap"))
.map_err(|e| {
if e.kind() != std::io::ErrorKind::NotFound {
err.get_or_insert(e.into());
}
})
{
buf.clear();
std::io::copy(&mut file, &mut buf)
.map_err(|e| err.get_or_insert(e.into()))
.ok();
target.merge(git_mailmap::parse_ignore_errors(&buf));
}
}
}
if let Some(blob) = blob_id.and_then(|id| self.find_object(id).map_err(|e| err.get_or_insert(e.into())).ok()) {
target.merge(git_mailmap::parse_ignore_errors(&blob.data));
}
let configured_path = self
.config
.resolved
.value::<git_config::Path<'_>>("mailmap", None, "file")
.ok()
.and_then(|path| {
let install_dir = self.install_dir().ok()?;
let home = self.config.home_dir();
match path.interpolate(git_config::path::interpolate::Context {
git_install_dir: Some(install_dir.as_path()),
home_dir: home.as_deref(),
home_for_user: if self.options.git_dir_trust.expect("trust is set") == git_sec::Trust::Full {
Some(git_config::path::interpolate::home_for_user)
} else {
None
},
}) {
Ok(path) => Some(path),
Err(e) => {
err.get_or_insert(e.into());
None
}
}
});
if let Some(mut file) =
configured_path.and_then(|path| std::fs::File::open(path).map_err(|e| err.get_or_insert(e.into())).ok())
{
buf.clear();
std::io::copy(&mut file, &mut buf)
.map_err(|e| err.get_or_insert(e.into()))
.ok();
target.merge(git_mailmap::parse_ignore_errors(&buf));
}
err.map(Err).unwrap_or(Ok(()))
}
sourcepub fn head_id(&self) -> Result<Id<'_>, Error>
pub fn head_id(&self) -> Result<Id<'_>, Error>
Resolve the HEAD
reference, follow and peel its target and obtain its object id.
Note that this may fail for various reasons, most notably because the repository is freshly initialized and doesn’t have any commits yet.
Also note that the returned id is likely to point to a commit, but could also point to a tree or blob. It won’t, however, point to a tag as these are always peeled.
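A sketch, assuming repo is open and HEAD can be peeled to an object:
let id = repo.head_id()?;
println!("HEAD peels to {}", id.detach()); // detach() yields the plain ObjectId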
sourcepub fn head_name(&self) -> Result<Option<FullName>, Error>
pub fn head_name(&self) -> Result<Option<FullName>, Error>
Return the name to the symbolic reference HEAD
points to, or None
if the head is detached.
The difference to head_ref()
is that the latter requires the reference to exist,
whereas here we merely return the name of the possibly unborn reference.
sourcepub fn head_ref(&self) -> Result<Option<Reference<'_>>, Error>
pub fn head_ref(&self) -> Result<Option<Reference<'_>>, Error>
Return the reference that HEAD
points to, or None
if the head is detached or unborn.
Examples found in repository?
fn worktree_branches(repo: &Repository) -> Result<BTreeMap<git_ref::FullName, PathBuf>, update::Error> {
let mut map = BTreeMap::new();
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
for proxy in repo.worktrees()? {
let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
}
Ok(map)
}
More examples
pub(crate) fn one_round(
algo: Algorithm,
round: usize,
repo: &crate::Repository,
ref_map: &crate::remote::fetch::RefMap,
fetch_tags: crate::remote::fetch::Tags,
arguments: &mut git_protocol::fetch::Arguments,
_previous_response: Option<&git_protocol::fetch::Response>,
) -> Result<bool, Error> {
let tag_refspec_to_ignore = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
match algo {
Algorithm::Naive => {
assert_eq!(round, 1, "Naive always finishes after the first round, and claims.");
let mut has_missing_tracking_branch = false;
for mapping in &ref_map.mappings {
if tag_refspec_to_ignore.map_or(false, |tag_spec| {
mapping
.spec_index
.implicit_index()
.and_then(|idx| ref_map.extra_refspecs.get(idx))
.map_or(false, |spec| spec.to_ref() == tag_spec)
}) {
continue;
}
let have_id = mapping.local.as_ref().and_then(|name| {
repo.find_reference(name)
.ok()
.and_then(|r| r.target().try_id().map(ToOwned::to_owned))
});
match have_id {
Some(have_id) => {
if let Some(want_id) = mapping.remote.as_id() {
if want_id != have_id {
arguments.want(want_id);
arguments.have(have_id);
}
}
}
None => {
if let Some(want_id) = mapping.remote.as_id() {
arguments.want(want_id);
has_missing_tracking_branch = true;
}
}
}
}
if has_missing_tracking_branch {
if let Ok(Some(r)) = repo.head_ref() {
if let Some(id) = r.target().try_id() {
arguments.have(id);
}
}
}
Ok(true)
}
}
}
sourcepub fn head_commit(&self) -> Result<Commit<'_>, Error>
pub fn head_commit(&self) -> Result<Commit<'_>, Error>
Return the commit object the HEAD
reference currently points to after peeling it fully.
Note that this may fail for various reasons, most notably because the repository is freshly initialized and doesn’t have any commits yet. It could also fail if the head does not point to a commit.
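A sketch, assuming repo is open, HEAD points to a commit, and the enclosing function returns a compatible error:
let commit = repo.head_commit()?;
let _tree = commit.tree()?; // the snapshot the current commit refers to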
sourcepub fn find_reference<'a, Name, E>(
&self,
name: Name
) -> Result<Reference<'_>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
pub fn find_reference<'a, Name, E>(
&self,
name: Name
) -> Result<Reference<'_>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
Find the reference with the given partial or full name
, like main
, HEAD
, heads/branch
or origin/other
,
or return an error if it wasn’t found.
Consider try_find_reference(…)
if the reference might not exist
without that being considered an error.
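A sketch, assuming repo is open and a branch named main exists:
let mut reference = repo.find_reference("main")?;
let _id = reference.peel_to_id_in_place()?; // follow symbolic targets until an object id is reached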
Examples found in repository?
pub fn head(&self) -> Result<crate::Head<'_>, reference::find::existing::Error> {
let head = self.find_reference("HEAD")?;
Ok(match head.inner.target {
Target::Symbolic(branch) => match self.find_reference(&branch) {
Ok(r) => crate::head::Kind::Symbolic(r.detach()),
Err(reference::find::existing::Error::NotFound) => crate::head::Kind::Unborn(branch),
Err(err) => return Err(err),
},
Target::Peeled(target) => crate::head::Kind::Detached {
target,
peeled: head.inner.peeled,
},
}
.attach(self))
}
More examples
fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
self.unset_disambiguate_call();
fn prior_checkouts_iter<'a>(
platform: &'a mut git_ref::file::log::iter::Platform<'static, '_>,
) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
match platform.rev().ok().flatten() {
Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
line.message
.strip_prefix(b"checkout: moving from ")
.and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
.map(|from_branch| (from_branch.into(), line.previous_oid))
})),
None => Err(Error::MissingRefLog {
reference: "HEAD".into(),
action: "search prior checked out branch",
}),
}
}
let head = match self.repo.head() {
Ok(head) => head,
Err(err) => {
self.err.push(err.into());
return None;
}
};
match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
Ok(Some((ref_name, id))) => {
let id = match self.repo.find_reference(ref_name.as_bstr()) {
Ok(mut r) => {
let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
self.refs[self.idx] = Some(r.detach());
id
}
Err(_) => id,
};
self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
Some(())
}
Ok(None) => {
self.err.push(Error::PriorCheckoutOutOfRange {
desired: branch_no,
available: prior_checkouts_iter(&mut head.log_iter())
.map(|it| it.count())
.unwrap_or(0),
});
None
}
Err(err) => {
self.err.push(err);
None
}
}
}
pub(crate) fn one_round(
algo: Algorithm,
round: usize,
repo: &crate::Repository,
ref_map: &crate::remote::fetch::RefMap,
fetch_tags: crate::remote::fetch::Tags,
arguments: &mut git_protocol::fetch::Arguments,
_previous_response: Option<&git_protocol::fetch::Response>,
) -> Result<bool, Error> {
let tag_refspec_to_ignore = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
match algo {
Algorithm::Naive => {
assert_eq!(round, 1, "Naive always finishes after the first round, and claims.");
let mut has_missing_tracking_branch = false;
for mapping in &ref_map.mappings {
if tag_refspec_to_ignore.map_or(false, |tag_spec| {
mapping
.spec_index
.implicit_index()
.and_then(|idx| ref_map.extra_refspecs.get(idx))
.map_or(false, |spec| spec.to_ref() == tag_spec)
}) {
continue;
}
let have_id = mapping.local.as_ref().and_then(|name| {
repo.find_reference(name)
.ok()
.and_then(|r| r.target().try_id().map(ToOwned::to_owned))
});
match have_id {
Some(have_id) => {
if let Some(want_id) = mapping.remote.as_id() {
if want_id != have_id {
arguments.want(want_id);
arguments.have(have_id);
}
}
}
None => {
if let Some(want_id) = mapping.remote.as_id() {
arguments.want(want_id);
has_missing_tracking_branch = true;
}
}
}
}
if has_missing_tracking_branch {
if let Ok(Some(r)) = repo.head_ref() {
if let Some(id) = r.target().try_id() {
arguments.have(id);
}
}
}
Ok(true)
}
}
}
sourcepub fn references(&self) -> Result<Platform<'_>, Error>
pub fn references(&self) -> Result<Platform<'_>, Error>
Return a platform for iterating references.
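A hedged sketch of iterating all references through the platform, assuming a repository in the current directory; broken references are skipped rather than reported.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    let platform = repo.references()?;
    // `all()` yields Result<Reference<'_>, _>; errors are silently skipped here.
    for reference in platform.all()?.filter_map(Result::ok) {
        println!("{}", reference.name().as_bstr());
    }
    Ok(())
}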
Examples found in repository?
fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
let platform = repo.references()?;
Ok(match self {
SelectRef::AllTags | SelectRef::AllRefs => {
let mut refs: Vec<_> = match self {
SelectRef::AllRefs => platform.all()?,
SelectRef::AllTags => platform.tags()?,
_ => unreachable!(),
}
.filter_map(Result::ok)
.filter_map(|mut r: crate::Reference<'_>| {
let target_id = r.target().try_id().map(ToOwned::to_owned);
let peeled_id = r.peel_to_id_in_place().ok()?;
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
(1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
}
_ => (0, 0),
};
(
peeled_id.inner,
prio,
tag_time,
Cow::from(r.inner.name.shorten().to_owned()),
)
.into()
})
.collect();
// By priority, then by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
refs.sort_by(
|(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
a_prio
.cmp(b_prio)
.then_with(|| a_time.cmp(b_time))
.then_with(|| b_name.cmp(a_name))
},
);
refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
}
SelectRef::AnnotatedTags => {
let mut peeled_commits_and_tag_date: Vec<_> = platform
.tags()?
.filter_map(Result::ok)
.filter_map(|r: crate::Reference<'_>| {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
let tag_time = tag
.tagger()
.ok()
.and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
.unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
.collect();
// Sort by time ascending, then lexicographically.
// More recent entries overwrite older ones due to collection into hashmap.
peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
});
peeled_commits_and_tag_date
.into_iter()
.map(|(a, _, c)| (a, c))
.collect()
}
})
}
More examples
fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
self.unset_disambiguate_call();
self.follow_refs_to_objects_if_needed()?;
#[cfg(not(feature = "regex"))]
let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
#[cfg(feature = "regex")]
let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
Ok(compiled) => {
let needs_regex = regex::escape(compiled.as_str()) != regex;
move |message: &BStr| -> bool {
if needs_regex {
compiled.is_match(message) ^ negated
} else {
message.contains_str(regex) ^ negated
}
}
}
Err(err) => {
self.err.push(err.into());
return None;
}
};
match self.objs[self.idx].as_mut() {
Some(objs) => {
let repo = self.repo;
let mut errors = Vec::new();
let mut replacements = Replacements::default();
for oid in objs.iter() {
match oid
.attach(repo)
.ancestors()
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
replacements.push((*oid, commit.id));
matched = true;
break;
}
}
Err(err) => errors.push((*oid, err)),
}
}
if !matched {
errors.push((
*oid,
Error::NoRegexMatch {
regex: regex.into(),
commits_searched: count,
oid: oid.attach(repo).shorten_or_id(),
},
))
}
}
Err(err) => errors.push((*oid, err.into())),
}
}
handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
}
None => match self.repo.references() {
Ok(references) => match references.all() {
Ok(references) => {
match self
.repo
.rev_walk(
references
.peeled()
.filter_map(Result::ok)
.filter(|r| {
r.id()
.object()
.ok()
.map(|obj| obj.kind == git_object::Kind::Commit)
.unwrap_or(false)
})
.filter_map(|r| r.detach().peeled),
)
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(commit.id);
matched = true;
break;
}
}
Err(err) => self.err.push(err),
}
}
if matched {
Some(())
} else {
self.err.push(Error::NoRegexMatchAllRefs {
regex: regex.into(),
commits_searched: count,
});
None
}
}
Err(err) => {
self.err.push(err.into());
None
}
}
}
Err(err) => {
self.err.push(err.into());
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
},
}
}
sourcepub fn try_find_reference<'a, Name, E>(
&self,
name: Name
) -> Result<Option<Reference<'_>>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
pub fn try_find_reference<'a, Name, E>(
&self,
name: Name
) -> Result<Option<Reference<'_>>, Error>where
Name: TryInto<&'a PartialNameRef, Error = E>,
Error: From<E>,
Try to find the reference named name, like main, heads/branch, HEAD or origin/other, and return it. Otherwise return None if the reference wasn’t found.
If the reference is expected to exist, use find_reference().
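A minimal sketch distinguishing "not found" from an actual lookup error; the branch name is illustrative.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // `Ok(None)` means the reference simply does not exist;
    // only real lookup failures surface as `Err`.
    match repo.try_find_reference("heads/feature")? {
        Some(reference) => println!("found {}", reference.name().as_bstr()),
        None => println!("no such reference"),
    }
    Ok(())
}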
Examples found in repository?
pub(crate) fn update(
repo: &Repository,
message: RefLogMessage,
mappings: &[fetch::Mapping],
refspecs: &[git_refspec::RefSpec],
extra_refspecs: &[git_refspec::RefSpec],
fetch_tags: fetch::Tags,
dry_run: fetch::DryRun,
write_packed_refs: fetch::WritePackedRefs,
) -> Result<update::Outcome, update::Error> {
let mut edits = Vec::new();
let mut updates = Vec::new();
let implicit_tag_refspec = fetch_tags
.to_refspec()
.filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
|fetch::Mapping {
remote,
local,
spec_index,
}| {
spec_index.get(refspecs, extra_refspecs).map(|spec| {
(
remote,
local,
spec,
implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
)
})
},
) {
let remote_id = match remote.as_id() {
Some(id) => id,
None => continue,
};
if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
let update = if is_implicit_tag {
update::Mode::ImplicitTagNotSentByRemote.into()
} else {
update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
};
updates.push(update);
continue;
}
let checked_out_branches = worktree_branches(repo)?;
let (mode, edit_index) = match local {
Some(name) => {
let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
Some(existing) => {
if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
let mode = update::Mode::RejectedCurrentlyCheckedOut {
worktree_dir: wt_dir.to_owned(),
};
updates.push(mode.into());
continue;
}
match existing.target() {
TargetRef::Symbolic(_) => {
updates.push(update::Mode::RejectedSymbolic.into());
continue;
}
TargetRef::Peeled(local_id) => {
let previous_value =
PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
let (mode, reflog_message) = if local_id == remote_id {
(update::Mode::NoChangeNeeded, "no update will be performed")
} else if let Some(git_ref::Category::Tag) = existing.name().category() {
if spec.allow_non_fast_forward() {
(update::Mode::Forced, "updating tag")
} else {
updates.push(update::Mode::RejectedTagUpdate.into());
continue;
}
} else {
let mut force = spec.allow_non_fast_forward();
let is_fast_forward = match dry_run {
fetch::DryRun::No => {
let ancestors = repo
.find_object(local_id)?
.try_into_commit()
.map_err(|_| ())
.and_then(|c| {
c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
}).and_then(|local_commit_time|
remote_id
.to_owned()
.ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
.sorting(
git_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
time_in_seconds_since_epoch: local_commit_time
},
)
.map_err(|_| ())
);
match ancestors {
Ok(mut ancestors) => {
ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
}
Err(_) => {
force = true;
false
}
}
}
fetch::DryRun::Yes => true,
};
if is_fast_forward {
(
update::Mode::FastForward,
matches!(dry_run, fetch::DryRun::Yes)
.then(|| "fast-forward (guessed in dry-run)")
.unwrap_or("fast-forward"),
)
} else if force {
(update::Mode::Forced, "forced-update")
} else {
updates.push(update::Mode::RejectedNonFastForward.into());
continue;
}
};
(mode, reflog_message, existing.name().to_owned(), previous_value)
}
}
}
None => {
let name: git_ref::FullName = name.try_into()?;
let reflog_msg = match name.category() {
Some(git_ref::Category::Tag) => "storing tag",
Some(git_ref::Category::LocalBranch) => "storing head",
_ => "storing ref",
};
(
update::Mode::New,
reflog_msg,
name,
PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
)
}
};
let edit = RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
message: message.compose(reflog_message),
},
expected: previous_value,
new: if let Source::Ref(git_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
match mappings.iter().find_map(|m| {
m.remote.as_name().and_then(|name| {
(name == target)
.then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
.flatten()
})
}) {
Some(local_branch) => {
// This is always safe because…
// - the reference may exist already
// - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
// - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
// target reference still exists and we can point to it.
Target::Symbolic(local_branch)
}
None => Target::Peeled(remote_id.into()),
}
} else {
Target::Peeled(remote_id.into())
},
},
name,
deref: false,
};
let edit_index = edits.len();
edits.push(edit);
(mode, Some(edit_index))
}
None => (update::Mode::NoChangeNeeded, None),
};
updates.push(Update { mode, edit_index })
}
let edits = match dry_run {
fetch::DryRun::No => {
let (file_lock_fail, packed_refs_lock_fail) = repo
.config
.lock_timeout()
.map_err(crate::reference::edit::Error::from)?;
repo.refs
.transaction()
.packed_refs(
match write_packed_refs {
fetch::WritePackedRefs::Only => {
git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
repo.objects
.try_find(oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}))},
fetch::WritePackedRefs::Never => git_ref::file::transaction::PackedRefs::DeletionsOnly
}
)
.prepare(edits, file_lock_fail, packed_refs_lock_fail)
.map_err(crate::reference::edit::Error::from)?
.commit(repo.committer_or_default())
.map_err(crate::reference::edit::Error::from)?
}
fetch::DryRun::Yes => edits,
};
Ok(update::Outcome { edits, updates })
}
source§impl Repository
impl Repository
sourcepub fn remote_at<Url, E>(&self, url: Url) -> Result<Remote<'_>, Error>where
Url: TryInto<Url, Error = E>,
Error: From<E>,
pub fn remote_at<Url, E>(&self, url: Url) -> Result<Remote<'_>, Error>where
Url: TryInto<Url, Error = E>,
Error: From<E>,
Create a new remote available at the given url.
It’s configured to fetch included tags by default, similar to git. See with_fetch_tags(…) for a way to change it.
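A sketch of creating an ad-hoc remote from a URL string, assuming the string converts into a git URL via the TryInto bound; the URL itself is made up.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // Any parseable git URL works here; this one is illustrative only.
    let remote = repo.remote_at("https://example.com/user/project.git")?;
    println!("fetch url: {:?}", remote.url(git::remote::Direction::Fetch));
    Ok(())
}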
Examples found in repository?
pub fn remote(
&self,
direction: remote::Direction,
) -> Option<Result<crate::Remote<'repo>, remote::find::existing::Error>> {
// TODO: use `branch.<name>.merge`
self.remote_name(direction).map(|name| match name {
remote::Name::Symbol(name) => self.repo.find_remote(name.as_ref()).map_err(Into::into),
remote::Name::Url(url) => git_url::parse(url.as_ref()).map_err(Into::into).and_then(|url| {
self.repo
.remote_at(url)
.map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err)))
}),
})
}
More examples
pub fn fetch_only<P>(
&mut self,
progress: P,
should_interrupt: &std::sync::atomic::AtomicBool,
) -> Result<(Repository, crate::remote::fetch::Outcome), Error>
where
P: crate::Progress,
P::SubProgress: 'static,
{
use crate::remote;
use crate::{bstr::ByteVec, remote::fetch::RefLogMessage};
let repo = self
.repo
.as_mut()
.expect("user error: multiple calls are allowed only until it succeeds");
let remote_name = match self.remote_name.as_ref() {
Some(name) => name.to_owned(),
None => repo
.config
.resolved
.string_by_key("clone.defaultRemoteName")
.map(|n| remote::name::validated(n.into_owned()))
.unwrap_or_else(|| Ok("origin".into()))?,
};
let mut remote = repo
.remote_at(self.url.clone())?
.with_refspecs(
Some(format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str()),
remote::Direction::Fetch,
)
.expect("valid static spec");
let mut clone_fetch_tags = None;
if let Some(f) = self.configure_remote.as_mut() {
remote = f(remote).map_err(|err| Error::RemoteConfiguration(err))?;
} else {
clone_fetch_tags = remote::fetch::Tags::All.into();
}
let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?;
// Now we are free to apply remote configuration we don't want to be written to disk.
if let Some(fetch_tags) = clone_fetch_tags {
remote = remote.with_fetch_tags(fetch_tags);
}
// Add HEAD after the remote was written to config, we need it to know what to checkout later, and assure
// the ref that HEAD points to is present no matter what.
let head_refspec = git_refspec::parse(
format!("HEAD:refs/remotes/{remote_name}/HEAD").as_str().into(),
git_refspec::parse::Operation::Fetch,
)
.expect("valid")
.to_owned();
let pending_pack: remote::fetch::Prepare<'_, '_, _, _> =
remote.connect(remote::Direction::Fetch, progress)?.prepare_fetch({
let mut opts = self.fetch_options.clone();
if !opts.extra_refspecs.contains(&head_refspec) {
opts.extra_refspecs.push(head_refspec)
}
opts
})?;
if pending_pack.ref_map().object_hash != repo.object_hash() {
unimplemented!("configure repository to expect a different object hash as advertised by the server")
}
let reflog_message = {
let mut b = self.url.to_bstring();
b.insert_str(0, "clone: from ");
b
};
let outcome = pending_pack
.with_write_packed_refs_only(true)
.with_reflog_message(RefLogMessage::Override {
message: reflog_message.clone(),
})
.receive(should_interrupt)?;
util::replace_changed_local_config_file(repo, config);
util::update_head(
repo,
&outcome.ref_map.remote_refs,
reflog_message.as_ref(),
remote_name.as_ref(),
)?;
Ok((self.repo.take().expect("still present"), outcome))
}
sourcepub fn remote_at_without_url_rewrite<Url, E>(
&self,
url: Url
) -> Result<Remote<'_>, Error>where
Url: TryInto<Url, Error = E>,
Error: From<E>,
pub fn remote_at_without_url_rewrite<Url, E>(
&self,
url: Url
) -> Result<Remote<'_>, Error>where
Url: TryInto<Url, Error = E>,
Error: From<E>,
Create a new remote available at the given url, similar to remote_at(), but without rewriting the url according to rewrite rules.
This eliminates a failure mode in case the rewritten URL is faulty, and allows rewrite rules to be applied selectively later and non-destructively.
sourcepub fn find_remote<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Result<Remote<'_>, Error>
pub fn find_remote<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Result<Remote<'_>, Error>
Find the remote with the given name_or_url or report an error, similar to try_find_remote(…).
Note that we will obtain remotes only if we deem them trustworthy.
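A minimal sketch looking up a configured remote by its conventional name; the name "origin" is an assumption about the repository's configuration.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // "origin" is only the conventional name - any configured remote works.
    let remote = repo.find_remote("origin")?;
    println!("fetch url: {:?}", remote.url(git::remote::Direction::Fetch));
    Ok(())
}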
Examples found in repository?
pub fn remote(
&self,
direction: remote::Direction,
) -> Option<Result<crate::Remote<'repo>, remote::find::existing::Error>> {
// TODO: use `branch.<name>.merge`
self.remote_name(direction).map(|name| match name {
remote::Name::Symbol(name) => self.repo.find_remote(name.as_ref()).map_err(Into::into),
remote::Name::Url(url) => git_url::parse(url.as_ref()).map_err(Into::into).and_then(|url| {
self.repo
.remote_at(url)
.map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err)))
}),
})
}
fn setup_branch_config(
repo: &mut Repository,
branch: &FullNameRef,
branch_id: Option<&git_hash::oid>,
remote_name: &BStr,
) -> Result<(), Error> {
let short_name = match branch.category_and_short_name() {
Some((cat, shortened)) if cat == git_ref::Category::LocalBranch => match shortened.to_str() {
Ok(s) => s,
Err(_) => return Ok(()),
},
_ => return Ok(()),
};
let remote = repo
.find_remote(remote_name)
.expect("remote was just created and must be visible in config");
let group = git_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
let null = git_hash::ObjectId::null(repo.object_hash());
let res = group.match_remotes(
Some(git_refspec::match_group::Item {
full_ref_name: branch.as_bstr(),
target: branch_id.unwrap_or(&null),
object: None,
})
.into_iter(),
);
if !res.mappings.is_empty() {
let mut metadata = git_config::file::Metadata::from(git_config::Source::Local);
let config_path = remote.repo.git_dir().join("config");
metadata.path = Some(config_path.clone());
let mut config =
git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load");
let mut section = config
.new_section("branch", Some(Cow::Owned(short_name.into())))
.expect("section header name is always valid per naming rules, our input branch name is valid");
section.push("remote".try_into().expect("valid at compile time"), Some(remote_name));
section.push(
"merge".try_into().expect("valid at compile time"),
Some(branch.as_bstr()),
);
std::fs::write(config_path, config.to_bstring())?;
replace_changed_local_config_file(repo, config);
}
Ok(())
}
sourcepub fn find_default_remote(
&self,
direction: Direction
) -> Option<Result<Remote<'_>, Error>>
pub fn find_default_remote(
&self,
direction: Direction
) -> Option<Result<Remote<'_>, Error>>
Find the default remote as configured, or None if no such configuration could be found.
See remote_default_name() for more information on the direction parameter.
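A sketch of falling back gracefully when no default remote is configured for the fetch direction.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // `None` means no default remote is configured for this direction.
    match repo.find_default_remote(git::remote::Direction::Fetch) {
        Some(remote) => {
            let remote = remote?;
            println!("fetch url: {:?}", remote.url(git::remote::Direction::Fetch));
        }
        None => println!("no default remote configured"),
    }
    Ok(())
}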
sourcepub fn try_find_remote<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Option<Result<Remote<'_>, Error>>
pub fn try_find_remote<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Option<Result<Remote<'_>, Error>>
Find the remote with the given name_or_url, or return None if it doesn’t exist, for the purpose of fetching or pushing data to a remote.
There are various error kinds related to partial information or incorrectly formatted URLs or ref-specs.
Also note that the created Remote may have neither fetch nor push ref-specs set at all.
Note that ref-specs are de-duplicated right away, which may change their order. This doesn’t affect matching in any way, as negations/excludes are applied after includes.
We will only include information if we deem it trustworthy.
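A sketch separating the three possible outcomes: remote found, remote present but unusable, and remote absent.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    match repo.try_find_remote("origin") {
        Some(Ok(remote)) => println!("fetch url: {:?}", remote.url(git::remote::Direction::Fetch)),
        Some(Err(err)) => eprintln!("remote exists but could not be loaded: {err}"),
        None => println!("no remote named 'origin'"),
    }
    Ok(())
}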
sourcepub fn try_find_remote_without_url_rewrite<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Option<Result<Remote<'_>, Error>>
pub fn try_find_remote_without_url_rewrite<'a>(
&self,
name_or_url: impl Into<&'a BStr>
) -> Option<Result<Remote<'_>, Error>>
Similar to try_find_remote(), but skips URL rewriting, which removes a failure mode in case rewritten URLs turn out to be invalid.
Use this in conjunction with Remote::rewrite_urls() to apply the rules non-destructively and keep the failed urls unchanged.
source§impl Repository
impl Repository
Methods for resolving revisions by spec or working with the commit graph.
sourcepub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result<Spec<'_>, Error>
pub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result<Spec<'_>, Error>
Parse a revision specification and turn it into the object(s) it describes, similar to git rev-parse.
Deviation
@ actually stands for HEAD, whereas git resolves it to the object pointed to by HEAD without making the HEAD ref available for lookups.
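A minimal sketch, assuming the returned Spec exposes a single() accessor that yields an id when the spec describes exactly one object rather than a range.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    let spec = repo.rev_parse("HEAD~1")?;
    // Ranges like `a..b` would not collapse to a single id here.
    if let Some(id) = spec.single() {
        println!("HEAD~1 -> {}", id);
    }
    Ok(())
}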
sourcepub fn rev_parse_single<'repo, 'a>(
&'repo self,
spec: impl Into<&'a BStr>
) -> Result<Id<'repo>, Error>
pub fn rev_parse_single<'repo, 'a>(
&'repo self,
spec: impl Into<&'a BStr>
) -> Result<Id<'repo>, Error>
Parse a revision specification and return the single object id it describes.
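A one-liner sketch for the common case of resolving a spec that must describe exactly one object.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // Fails if the spec cannot be resolved to exactly one object.
    let id = repo.rev_parse_single("HEAD")?;
    println!("HEAD is {}", id);
    Ok(())
}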
sourcepub fn rev_walk(
&self,
tips: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Platform<'_>
pub fn rev_walk(
&self,
tips: impl IntoIterator<Item = impl Into<ObjectId>>
) -> Platform<'_>
Create the baseline for a revision walk by initializing it with the tips to start iterating on.
It can be configured further before starting the actual walk.
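A sketch walking the graph from HEAD with default settings; detach() turns the attached id into a plain ObjectId accepted as a tip.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    let head_id = repo.rev_parse_single("HEAD")?;
    // Each item of the walk is a Result around a commit id.
    for commit_id in repo.rev_walk(Some(head_id.detach())).all()? {
        println!("{}", commit_id?);
    }
    Ok(())
}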
Examples found in repository?
fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
self.unset_disambiguate_call();
self.follow_refs_to_objects_if_needed()?;
#[cfg(not(feature = "regex"))]
let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
#[cfg(feature = "regex")]
let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
Ok(compiled) => {
let needs_regex = regex::escape(compiled.as_str()) != regex;
move |message: &BStr| -> bool {
if needs_regex {
compiled.is_match(message) ^ negated
} else {
message.contains_str(regex) ^ negated
}
}
}
Err(err) => {
self.err.push(err.into());
return None;
}
};
match self.objs[self.idx].as_mut() {
Some(objs) => {
let repo = self.repo;
let mut errors = Vec::new();
let mut replacements = Replacements::default();
for oid in objs.iter() {
match oid
.attach(repo)
.ancestors()
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
replacements.push((*oid, commit.id));
matched = true;
break;
}
}
Err(err) => errors.push((*oid, err)),
}
}
if !matched {
errors.push((
*oid,
Error::NoRegexMatch {
regex: regex.into(),
commits_searched: count,
oid: oid.attach(repo).shorten_or_id(),
},
))
}
}
Err(err) => errors.push((*oid, err.into())),
}
}
handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
}
None => match self.repo.references() {
Ok(references) => match references.all() {
Ok(references) => {
match self
.repo
.rev_walk(
references
.peeled()
.filter_map(Result::ok)
.filter(|r| {
r.id()
.object()
.ok()
.map(|obj| obj.kind == git_object::Kind::Commit)
.unwrap_or(false)
})
.filter_map(|r| r.detach().peeled),
)
.sorting(Sorting::ByCommitTimeNewestFirst)
.all()
{
Ok(iter) => {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
res.map_err(Error::from).and_then(|commit_id| {
commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
})
});
for commit in commits {
count += 1;
match commit {
Ok(commit) => {
if matches(commit.message_raw_sloppy()) {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(commit.id);
matched = true;
break;
}
}
Err(err) => self.err.push(err),
}
}
if matched {
Some(())
} else {
self.err.push(Error::NoRegexMatchAllRefs {
regex: regex.into(),
commits_searched: count,
});
None
}
}
Err(err) => {
self.err.push(err.into());
None
}
}
}
Err(err) => {
self.err.push(err.into());
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
},
}
}
source§impl Repository
impl Repository
sourcepub fn open_mailmap(&self) -> Snapshot
pub fn open_mailmap(&self) -> Snapshot
Similar to open_mailmap_into(), but ignores all errors and returns an empty mailmap at worst, e.g. if there is no mailmap or if it failed to load.
This mirrors typical usage within git, which works with whatever mailmap data is available and treats a populated mailmap as optional rather than a reason to abort an operation.
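A minimal sketch demonstrating the infallible call; what to do with the snapshot afterwards depends on the git_mailmap API and is left out here.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // Never fails: an absent or unreadable mailmap yields an empty snapshot.
    let mailmap = repo.open_mailmap();
    let _ = mailmap; // use it to rewrite author/committer signatures
    Ok(())
}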
sourcepub fn open_mailmap_into(&self, target: &mut Snapshot) -> Result<(), Error>
pub fn open_mailmap_into(&self, target: &mut Snapshot) -> Result<(), Error>
Try to merge mailmaps from the following locations into target:
- read the .mailmap file without following symlinks from the working tree, if present
- OR read HEAD:.mailmap if this repository is bare (i.e. has no working tree) and mailmap.blob is not set
- read the mailmap as configured in mailmap.blob, if set
- read the file as configured by mailmap.file, following symlinks, if set
Only the first error will be reported, and as many source mailmaps as possible will be merged into target.
Parsing errors will be ignored.
source§impl Repository
impl Repository
sourcepub fn state(&self) -> Option<InProgress>
pub fn state(&self) -> Option<InProgress>
Returns the status of an in-progress operation on the repository, or None if no operation is currently in progress.
Not to be confused with the repository’s ‘status’.
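A sketch checking for an in-progress operation, assuming InProgress implements Debug for display purposes.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // Reports operations such as an ongoing rebase, merge, bisect or cherry-pick.
    match repo.state() {
        Some(operation) => println!("operation in progress: {:?}", operation),
        None => println!("no operation in progress"),
    }
    Ok(())
}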
source§impl Repository
impl Repository
Worktree iteration
sourcepub fn worktrees(&self) -> Result<Vec<Proxy<'_>>>
pub fn worktrees(&self) -> Result<Vec<Proxy<'_>>>
Return a list of all linked worktrees sorted by private git dir path, as lightweight proxies.
Note that these need additional processing to become usable, but they provide a first glimpse of typical worktree information.
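A sketch enumerating linked worktrees and upgrading each proxy as in the example below; git dirs are printed as a stand-in for real processing.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    for proxy in repo.worktrees()? {
        // The checkout itself may be missing or inaccessible - the git dir still exists.
        let wt_repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
        println!("linked worktree git dir: {:?}", wt_repo.git_dir());
    }
    Ok(())
}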
Examples found in repository?
fn worktree_branches(repo: &Repository) -> Result<BTreeMap<git_ref::FullName, PathBuf>, update::Error> {
let mut map = BTreeMap::new();
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
for proxy in repo.worktrees()? {
let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
map.insert(head_ref.inner.name, wt_dir.to_owned());
}
}
Ok(map)
}
source§impl Repository
impl Repository
Interact with individual worktrees and their information.
sourcepub fn main_repo(&self) -> Result<Repository, Error>
pub fn main_repo(&self) -> Result<Repository, Error>
Return the repository owning the main worktree, typically from a linked worktree.
Note that it might be the one that is currently open if this repository doesn’t point to a linked worktree. Also note that the main repo might be bare.
sourcepub fn worktree(&self) -> Option<Worktree<'_>>
pub fn worktree(&self) -> Option<Worktree<'_>>
Return the currently set worktree if there is one, acting as a platform that provides a validated worktree base path.
Note that this is None if this repository is bare and the parent Repository was instantiated without a registered worktree in the current working dir.
Examples found in repository?
pub fn kind(&self) -> crate::Kind {
match self.worktree() {
Some(wt) => {
if git_discover::is_submodule_git_dir(self.git_dir()) {
crate::Kind::Submodule
} else {
crate::Kind::WorkTree {
is_linked: !wt.is_main(),
}
}
}
None => crate::Kind::Bare,
}
}
sourcepub fn is_bare(&self) -> bool
pub fn is_bare(&self) -> bool
Return true if this repository is bare and has no main work tree.
This is not to be confused with the worktree() method, which may return a worktree if this instance was opened in a worktree that was created separately.
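A minimal sketch distinguishing bare from worktree repositories; work_dir() is the checkout directory as used in the in-crate examples.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    if repo.is_bare() {
        println!("bare repository at {:?}", repo.git_dir());
    } else {
        println!("worktree checkout at {:?}", repo.work_dir());
    }
    Ok(())
}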
sourcepub fn open_index(&self) -> Result<File, Error>
pub fn open_index(&self) -> Result<File, Error>
Open a new copy of the index file and decode it entirely.
It will use the index.threads configuration key to learn how many threads to use.
Note that it may fail if there is no index.
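A sketch decoding the index once, assuming the returned File dereferences to an index state with an entries() slice.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // Fails with a not-found flavoured error if there is no index file yet.
    let index = repo.open_index()?;
    println!("{} entries in the index", index.entries().len());
    Ok(())
}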
Examples found in repository?
pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
self.index
.recent_snapshot(
|| self.index_path().metadata().and_then(|m| m.modified()).ok(),
|| {
self.open_index().map(Some).or_else(|err| match err {
worktree::open_index::Error::IndexFile(git_index::file::init::Error::Io(err))
if err.kind() == std::io::ErrorKind::NotFound =>
{
Ok(None)
}
err => Err(err),
})
},
)
.and_then(|opt| match opt {
Some(index) => Ok(index),
None => Err(worktree::open_index::Error::IndexFile(
git_index::file::init::Error::Io(std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("Could not find index file at {:?} for opening.", self.index_path()),
)),
)),
})
}
sourcepub fn index(&self) -> Result<Index, Error>
pub fn index(&self) -> Result<Index, Error>
Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file on disk has changed.
The index file is shared across all clones of this repository.
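A sketch using the shared, automatically refreshed snapshot instead of decoding a private copy; the entries() accessor is assumed to be reachable through deref.

use git_repository as git;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = git::open(".")?;
    // The shared snapshot is re-read lazily when the file on disk changes.
    let index = repo.index()?;
    println!("{} entries in the shared index", index.entries().len());
    Ok(())
}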
Examples found in repository?
fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> {
self.unset_disambiguate_call();
match self.repo.index() {
Ok(index) => match index.entry_by_path_and_stage(path, stage.into()) {
Some(entry) => {
self.objs[self.idx]
.get_or_insert_with(HashSet::default)
.insert(entry.id);
Some(())
}
None => {
let stage_hint = [0, 1, 2]
.iter()
.filter(|our_stage| **our_stage != stage)
.find_map(|stage| {
index
.entry_index_by_path_and_stage(path, (*stage).into())
.map(|_| (*stage).into())
});
let exists = self
.repo
.work_dir()
.map_or(false, |root| root.join(git_path::from_bstr(path)).exists());
self.err.push(Error::IndexLookup {
desired_path: path.into(),
desired_stage: stage.into(),
exists,
stage_hint,
});
None
}
},
Err(err) => {
self.err.push(err.into());
None
}
}
}
Trait Implementations§
source§impl Clone for Repository
impl Clone for Repository
source§impl Debug for Repository
impl Debug for Repository
source§impl From<&ThreadSafeRepository> for Repository
impl From<&ThreadSafeRepository> for Repository
source§fn from(repo: &ThreadSafeRepository) -> Self
fn from(repo: &ThreadSafeRepository) -> Self
source§impl From<PrepareCheckout> for Repository
impl From<PrepareCheckout> for Repository
source§fn from(prep: PrepareCheckout) -> Self
fn from(prep: PrepareCheckout) -> Self
source§impl From<PrepareFetch> for Repository
impl From<PrepareFetch> for Repository
source§fn from(prep: PrepareFetch) -> Self
fn from(prep: PrepareFetch) -> Self
source§impl From<Repository> for ThreadSafeRepository
impl From<Repository> for ThreadSafeRepository
source§fn from(r: Repository) -> Self
fn from(r: Repository) -> Self
source§impl From<ThreadSafeRepository> for Repository
impl From<ThreadSafeRepository> for Repository
source§fn from(repo: ThreadSafeRepository) -> Self
fn from(repo: ThreadSafeRepository) -> Self
source§impl PartialEq<Repository> for Repository
impl PartialEq<Repository> for Repository
source§fn eq(&self, other: &Repository) -> bool
fn eq(&self, other: &Repository) -> bool
This method tests for self and other values to be equal, and is used by ==.