use std::collections::{BTreeSet, HashMap, HashSet};
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use crate::config::ConfigSet;
use crate::error::{Error, Result};
use crate::objects::ObjectId;
use crate::pack;
/// Parsed contents of a loose ref file: either a direct object id or a
/// symbolic reference naming another ref (the `"ref: ..."` form).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ref {
    /// Points directly at an object.
    Direct(ObjectId),
    /// Names another ref (e.g. `refs/heads/main`) to be resolved further.
    Symbolic(String),
}
/// Read and parse a single loose ref file at `path`.
///
/// When the path turns out to be a directory (`ErrorKind::IsADirectory` or,
/// as a fallback for older error mapping, raw `EISDIR`), the error is
/// normalized to `NotFound` so callers treat it like a missing ref.
pub fn read_ref_file(path: &Path) -> Result<Ref> {
    let content = match fs::read_to_string(path) {
        Ok(c) => c,
        Err(e)
            if e.kind() == io::ErrorKind::IsADirectory
                || e.raw_os_error() == Some(libc::EISDIR) =>
        {
            // Keep the original error as the source of the NotFound wrapper.
            return Err(Error::Io(io::Error::new(io::ErrorKind::NotFound, e)));
        }
        Err(e) => return Err(Error::Io(e)),
    };
    // Strip trailing newline(s); other whitespace handling is left to the
    // parser.
    let content = content.trim_end_matches('\n');
    parse_ref_content(content)
}
/// Parse the trimmed content of a loose ref file.
///
/// Accepts three forms: a `"ref: <target>"` symbolic ref, a 40-hex-digit
/// object id, or the literal `"unknown-oid"` sentinel (mapped to a fixed
/// placeholder id). Anything else is an invalid ref.
pub(crate) fn parse_ref_content(content: &str) -> Result<Ref> {
    if let Some(target) = content.strip_prefix("ref: ") {
        return Ok(Ref::Symbolic(target.trim().to_owned()));
    }
    let looks_like_oid = content.len() == 40 && content.bytes().all(|b| b.is_ascii_hexdigit());
    if looks_like_oid {
        let oid: ObjectId = content.parse()?;
        return Ok(Ref::Direct(oid));
    }
    if content == "unknown-oid" {
        // Fixed 20-byte placeholder standing in for an unknown object id.
        const PLACEHOLDER: &[u8; 20] = b"GritUnknownOidPlc!X!";
        return Ok(Ref::Direct(ObjectId::from_bytes(PLACEHOLDER)?));
    }
    Err(Error::InvalidRef(content.to_owned()))
}
/// Fully resolve `refname` to an object id, dispatching to the reftable
/// backend when the repository uses it, otherwise walking the files backend
/// starting at recursion depth 0.
pub fn resolve_ref(git_dir: &Path, refname: &str) -> Result<ObjectId> {
    if crate::reftable::is_reftable_repo(git_dir) {
        crate::reftable::reftable_resolve_ref(git_dir, refname)
    } else {
        let common = common_dir(git_dir);
        resolve_ref_depth(git_dir, common.as_deref(), refname, 0)
    }
}
/// Read the `commondir` file of a (linked-worktree) git dir and return the
/// canonicalized shared directory it points to.
///
/// Returns `None` when the file is absent/unreadable or the resulting path
/// cannot be canonicalized. Relative contents are resolved against
/// `git_dir`.
pub fn common_dir(git_dir: &Path) -> Option<PathBuf> {
    let raw = fs::read_to_string(git_dir.join("commondir")).ok()?;
    let rel = raw.trim();
    let rel_path = Path::new(rel);
    let joined = if rel_path.is_absolute() {
        rel_path.to_path_buf()
    } else {
        git_dir.join(rel_path)
    };
    joined.canonicalize().ok()
}
/// True for the notes-merge state refs, which are always per-worktree and
/// must never fall back to the shared common dir.
fn notes_merge_state_ref(refname: &str) -> bool {
    refname == "NOTES_MERGE_REF" || refname == "NOTES_MERGE_PARTIAL"
}
/// Resolve `refname` in the files backend, following symbolic refs up to a
/// bounded depth.
///
/// Lookup order per candidate name: loose file in this git dir, loose file
/// in the shared common dir, packed-refs in the common dir, packed-refs in
/// this git dir.
fn resolve_ref_depth(
    git_dir: &Path,
    common: Option<&Path>,
    refname: &str,
    depth: usize,
) -> Result<ObjectId> {
    // Guard against symref cycles / pathological chains.
    if depth > 10 {
        return Err(Error::InvalidRef(format!(
            "ref symlink too deep: {refname}"
        )));
    }
    // Candidate storage names: with a ref namespace configured, HEAD is
    // looked up only under its storage name; other refs try the storage
    // name first and fall back to the plain name.
    let storage_owned = crate::ref_namespace::storage_ref_name(refname);
    let try_names: Vec<&str> =
        if refname == "HEAD" && crate::ref_namespace::ref_storage_prefix().is_some() {
            vec![storage_owned.as_str()]
        } else if storage_owned != refname {
            vec![storage_owned.as_str(), refname]
        } else {
            vec![refname]
        };
    for name in try_names {
        // 1) Loose ref in this worktree's git dir.
        let path = git_dir.join(name);
        match read_ref_file(&path) {
            Ok(Ref::Direct(oid)) => return Ok(oid),
            Ok(Ref::Symbolic(target)) => {
                return resolve_ref_depth(git_dir, common, &target, depth + 1);
            }
            Err(Error::Io(ref e)) if e.kind() == io::ErrorKind::NotFound => {}
            Err(e) => return Err(e),
        }
        // 2) Loose ref in the shared common dir — skipped for the
        //    per-worktree notes-merge state refs.
        if let Some(cdir) = common {
            if notes_merge_state_ref(name) {
            } else if cdir != git_dir {
                let cpath = cdir.join(name);
                match read_ref_file(&cpath) {
                    Ok(Ref::Direct(oid)) => return Ok(oid),
                    Ok(Ref::Symbolic(target)) => {
                        return resolve_ref_depth(git_dir, common, &target, depth + 1);
                    }
                    Err(Error::Io(ref e)) if e.kind() == io::ErrorKind::NotFound => {}
                    Err(e) => return Err(e),
                }
            }
        }
        // 3) packed-refs: prefer the common dir's file, then this dir's.
        let packed_dir = common.unwrap_or(git_dir);
        if let Some(oid) = lookup_packed_ref(packed_dir, name)? {
            return Ok(oid);
        }
        if common.is_some() && common != Some(git_dir) {
            if let Some(oid) = lookup_packed_ref(git_dir, name)? {
                return Ok(oid);
            }
        }
    }
    Err(Error::InvalidRef(format!("ref not found: {refname}")))
}
/// Outcome of a raw (non-resolving) ref existence check.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RawRefLookup {
    /// A loose ref file or packed-refs entry exists under this name.
    Exists,
    /// Nothing exists under this name.
    NotFound,
    /// A directory occupies the path where the loose ref file would live.
    IsDirectory,
}
/// Raw existence check for `refname` (no symref resolution), dispatched to
/// whichever ref backend the repository uses.
pub fn read_raw_ref(git_dir: &Path, refname: &str) -> Result<RawRefLookup> {
    if !crate::reftable::is_reftable_repo(git_dir) {
        return read_raw_ref_files(git_dir, refname);
    }
    read_raw_ref_reftable(git_dir, refname)
}
/// Files-backend variant of [`read_raw_ref`]: report whether `refname`
/// exists as a loose file, a directory, or a packed-refs entry.
fn read_raw_ref_files(git_dir: &Path, refname: &str) -> Result<RawRefLookup> {
    let common = common_dir(git_dir);
    // Try the namespaced storage name first, then the plain name when they
    // differ; `n` limits how many slots of `names` are meaningful.
    let storage_owned = crate::ref_namespace::storage_ref_name(refname);
    let (names, n): ([&str; 2], usize) = if storage_owned != refname {
        ([storage_owned.as_str(), refname], 2)
    } else {
        ([refname, refname], 1)
    };
    for name in names.iter().take(n) {
        // Loose lookup in this worktree's git dir.
        if let Some(lookup) = read_raw_ref_at(git_dir.join(name))? {
            return Ok(lookup);
        }
        // Loose lookup in the shared common dir (skipped for per-worktree
        // notes-merge state refs).
        if let Some(cdir) = common.as_ref() {
            if *cdir != git_dir && !notes_merge_state_ref(name) {
                if let Some(lookup) = read_raw_ref_at(cdir.join(name))? {
                    return Ok(lookup);
                }
            }
        }
        // packed-refs in the common dir (or this dir when not a worktree)…
        let packed_dir = common.as_deref().unwrap_or(git_dir);
        if packed_ref_name_exists(packed_dir, name)? {
            return Ok(RawRefLookup::Exists);
        }
        // …then this dir's own packed-refs as a final fallback.
        if common.is_some()
            && common.as_deref() != Some(git_dir)
            && packed_ref_name_exists(git_dir, name)?
        {
            return Ok(RawRefLookup::Exists);
        }
    }
    Ok(RawRefLookup::NotFound)
}
/// Path of the lock file used when atomically updating the ref file at
/// `path`: the same path with `.lock` appended to the final component.
#[must_use]
pub fn lock_path_for_ref(path: &Path) -> PathBuf {
    let mut os = path.as_os_str().to_os_string();
    os.push(".lock");
    os.into()
}
/// Probe a single candidate loose-ref path.
///
/// Returns `Some(Exists)` for any non-directory entry (symlink metadata is
/// used, so dangling symlinks still count), `Some(IsDirectory)` for a
/// directory, and `None` when nothing exists there.
fn read_raw_ref_at(path: PathBuf) -> Result<Option<RawRefLookup>> {
    match fs::symlink_metadata(&path) {
        Ok(meta) if meta.is_dir() => Ok(Some(RawRefLookup::IsDirectory)),
        Ok(_) => Ok(Some(RawRefLookup::Exists)),
        Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(None),
        Err(e) => Err(Error::Io(e)),
    }
}
/// Find the lexicographically smallest packed-refs entry whose name starts
/// with `prefix_with_slash`, or `None` when the file is absent or nothing
/// matches.
fn packed_ref_with_prefix(git_dir: &Path, prefix_with_slash: &str) -> Result<Option<String>> {
    let packed = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(None),
        Err(e) => return Err(Error::Io(e)),
    };
    // Skip blank lines, the header comment, and '^' peel lines; the ref
    // name is the second whitespace-separated field.
    let best = content
        .lines()
        .filter(|l| !l.is_empty() && !l.starts_with('#') && !l.starts_with('^'))
        .filter_map(|l| l.split_whitespace().nth(1))
        .map(str::trim)
        .filter(|name| name.starts_with(prefix_with_slash))
        .min()
        .map(str::to_owned);
    Ok(best)
}
/// True when `refname` appears as an entry in `git_dir`'s packed-refs file;
/// false when the file is absent.
fn packed_ref_name_exists(git_dir: &Path, refname: &str) -> Result<bool> {
    let packed = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(false),
        Err(e) => return Err(Error::Io(e)),
    };
    // Entry lines are "<oid> <name>"; header and peel lines are skipped.
    let found = content
        .lines()
        .filter(|l| !l.is_empty() && !l.starts_with('#') && !l.starts_with('^'))
        .filter_map(|l| l.split_whitespace().nth(1))
        .any(|name| name == refname);
    Ok(found)
}
/// True when one name is a strict path ancestor of the other (D/F-style
/// conflict): e.g. `refs/a` vs `refs/a/b`. Equal names never conflict, and
/// a shared string prefix without a '/' boundary (`refs/a` vs `refs/ab`)
/// does not count.
fn refname_namespace_conflicts(existing: &str, candidate: &str) -> bool {
    if existing == candidate {
        return false;
    }
    let is_ancestor_of = |shorter: &str, longer: &str| {
        longer
            .strip_prefix(shorter)
            .is_some_and(|rest| rest.starts_with('/'))
    };
    is_ancestor_of(candidate, existing) || is_ancestor_of(existing, candidate)
}
/// True when any packed-refs entry in `git_dir` D/F-conflicts with
/// `refname` (one is a path ancestor of the other). A missing packed-refs
/// file means no conflict.
fn packed_ref_namespace_conflict(git_dir: &Path, refname: &str) -> Result<bool> {
    let packed = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(false),
        Err(e) => return Err(Error::Io(e)),
    };
    let conflict = content
        .lines()
        .filter(|l| !l.is_empty() && !l.starts_with('#') && !l.starts_with('^'))
        .filter_map(|l| l.split_whitespace().nth(1))
        .any(|name| refname_namespace_conflicts(name, refname));
    Ok(conflict)
}
/// True when `refname` has an entry in the packed-refs file that backs it.
///
/// Always false for reftable repositories (no packed-refs) and for HEAD,
/// which is never packed.
pub fn packed_refs_entry_exists(git_dir: &Path, refname: &str) -> Result<bool> {
    if crate::reftable::is_reftable_repo(git_dir) || refname == "HEAD" {
        return Ok(false);
    }
    packed_ref_name_exists(&ref_storage_dir(git_dir, refname), refname)
}
/// Why a ref name cannot be created: directory/file-style conflicts with
/// existing refs, or with other names in the same update batch.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RefnameUnavailable {
    /// An existing ref is a path ancestor of the new name
    /// (e.g. `refs/a` exists while creating `refs/a/b`).
    AncestorExists {
        blocking: String,
        new_ref: String,
    },
    /// An existing ref lives underneath the new name
    /// (e.g. `refs/a/b` exists while creating `refs/a`).
    DescendantExists {
        blocking: String,
        new_ref: String,
    },
    /// Another name being processed in the same batch conflicts with this one.
    SameBatch {
        refname: String,
        other: String,
    },
}
impl RefnameUnavailable {
#[must_use]
pub fn lock_message_suffix(&self) -> String {
match self {
RefnameUnavailable::AncestorExists { blocking, new_ref } => {
format!("'{blocking}' exists; cannot create '{new_ref}'")
}
RefnameUnavailable::DescendantExists { blocking, new_ref } => {
format!("'{blocking}' exists; cannot create '{new_ref}'")
}
RefnameUnavailable::SameBatch { refname, other } => {
format!("cannot process '{refname}' and '{other}' at the same time")
}
}
}
}
/// Return the lexicographically first name in `extras` that lies under
/// `dirname_with_slash` (a ref name with a trailing `/`), if any.
///
/// Because `extras` is sorted, the first element at or after the prefix is
/// the only candidate: if it does not start with the prefix, no later
/// element can either.
fn find_descendant_in_sorted_extras(
    dirname_with_slash: &str,
    extras: &BTreeSet<String>,
) -> Option<String> {
    extras
        .range(dirname_with_slash.to_owned()..)
        .next()
        // Check the prefix before cloning; the previous version cloned the
        // candidate unconditionally even when it did not match.
        .filter(|candidate| candidate.starts_with(dirname_with_slash))
        .cloned()
}
/// Check that `refname` can be created without colliding with existing refs
/// (directory/file conflicts) or with other names created in the same batch.
///
/// `extras` are names being created alongside this one (sorted, to allow
/// range scans); names in `skip` are ignored (e.g. refs being deleted in
/// the same transaction).
pub fn verify_refname_available_for_create(
    git_dir: &Path,
    refname: &str,
    extras: &BTreeSet<String>,
    skip: &HashSet<String>,
) -> std::result::Result<(), RefnameUnavailable> {
    // Canonicalize so later comparisons against common_dir paths (which are
    // canonicalized) behave consistently.
    let git_dir = fs::canonicalize(git_dir).unwrap_or_else(|_| git_dir.to_path_buf());
    let mut seen_dirnames: HashSet<String> = HashSet::new();
    let segments: Vec<&str> = refname.split('/').filter(|s| !s.is_empty()).collect();
    if segments.len() <= 1 {
    } else {
        // Every proper prefix ("refs", "refs/heads", …) must not already
        // exist as a ref, nor be scheduled for creation in the same batch.
        let mut dirname = String::new();
        for part in &segments[..segments.len() - 1] {
            if !dirname.is_empty() {
                dirname.push('/');
            }
            dirname.push_str(part);
            if !seen_dirnames.insert(dirname.clone()) {
                continue;
            }
            if skip.contains(&dirname) {
                continue;
            }
            match read_raw_ref(&git_dir, &dirname) {
                Ok(RawRefLookup::Exists) => {
                    return Err(RefnameUnavailable::AncestorExists {
                        blocking: dirname.clone(),
                        new_ref: refname.to_owned(),
                    });
                }
                // A directory is fine — loose refs nest inside directories.
                Ok(RawRefLookup::NotFound | RawRefLookup::IsDirectory) => {}
                Err(_) => {}
            }
            if extras.contains(&dirname) {
                return Err(RefnameUnavailable::SameBatch {
                    refname: refname.to_owned(),
                    other: dirname.clone(),
                });
            }
        }
    }
    // Opposite direction: nothing may already exist *under* refname.
    let mut leaf_dir = String::with_capacity(refname.len() + 1);
    leaf_dir.push_str(refname);
    leaf_dir.push('/');
    let under = list_refs(&git_dir, &leaf_dir).unwrap_or_default();
    if under.is_empty() {
        // list_refs resolved nothing, but packed-refs may still hold
        // (possibly unresolvable) descendant entries.
        let packed_dir = common_dir(&git_dir).unwrap_or_else(|| git_dir.clone());
        if let Ok(Some(name)) = packed_ref_with_prefix(&packed_dir, &leaf_dir) {
            if !skip.contains(&name) {
                return Err(RefnameUnavailable::DescendantExists {
                    blocking: name,
                    new_ref: refname.to_owned(),
                });
            }
        }
        if packed_dir != git_dir {
            if let Ok(Some(name)) = packed_ref_with_prefix(&git_dir, &leaf_dir) {
                if !skip.contains(&name) {
                    return Err(RefnameUnavailable::DescendantExists {
                        blocking: name,
                        new_ref: refname.to_owned(),
                    });
                }
            }
        }
    }
    // A loose directory sitting at refname with any regular file inside
    // also blocks creation (covers stale/unresolvable loose ref files).
    if under.is_empty()
        && fs::symlink_metadata(git_dir.join(refname))
            .map(|m| m.is_dir())
            .unwrap_or(false)
    {
        let mut blocking: Option<String> = None;
        let dir_path = git_dir.join(refname);
        if let Ok(read) = fs::read_dir(&dir_path) {
            for entry in read.flatten() {
                let path = entry.path();
                let Ok(meta) = fs::metadata(&path) else {
                    continue;
                };
                if !meta.is_file() {
                    continue;
                }
                // First regular file found is reported as the blocker.
                let name = entry.file_name().to_string_lossy().into_owned();
                let full = format!("{refname}/{name}");
                blocking = Some(full);
                break;
            }
        }
        if let Some(b) = blocking {
            if !skip.contains(&b) {
                return Err(RefnameUnavailable::DescendantExists {
                    blocking: b,
                    new_ref: refname.to_owned(),
                });
            }
        }
    }
    // Existing resolvable refs under refname/ block creation unless skipped.
    for (existing, _) in under {
        if skip.contains(&existing) {
            continue;
        }
        return Err(RefnameUnavailable::DescendantExists {
            blocking: existing,
            new_ref: refname.to_owned(),
        });
    }
    // Finally, descendants being created in the same batch.
    if let Some(extra) = find_descendant_in_sorted_extras(&leaf_dir, extras) {
        if !skip.contains(&extra) {
            return Err(RefnameUnavailable::SameBatch {
                refname: refname.to_owned(),
                other: extra,
            });
        }
    }
    Ok(())
}
/// Reftable-backend raw existence check.
///
/// HEAD is always a real file on disk and is checked there; for other names
/// a stray loose file takes precedence, then the reftable stack is queried.
fn read_raw_ref_reftable(git_dir: &Path, refname: &str) -> Result<RawRefLookup> {
    if refname == "HEAD" {
        let head_path = git_dir.join("HEAD");
        match fs::symlink_metadata(&head_path) {
            Ok(meta) => {
                if meta.is_dir() {
                    return Ok(RawRefLookup::IsDirectory);
                }
                return Ok(RawRefLookup::Exists);
            }
            Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(RawRefLookup::NotFound),
            Err(e) => return Err(Error::Io(e)),
        }
    }
    // Loose file (or directory) on disk wins over the reftable data.
    if let Some(lookup) = read_raw_ref_at(git_dir.join(refname))? {
        return Ok(lookup);
    }
    let stack = crate::reftable::ReftableStack::open(git_dir)?;
    match stack.lookup_ref(refname)? {
        Some(rec) => match rec.value {
            // A deletion record is a tombstone: the ref does not exist.
            crate::reftable::RefValue::Deletion => Ok(RawRefLookup::NotFound),
            _ => Ok(RawRefLookup::Exists),
        },
        None => Ok(RawRefLookup::NotFound),
    }
}
/// Look up `refname` in `git_dir`'s packed-refs file and parse its object
/// id. `Ok(None)` when the file is absent or the name is not listed.
fn lookup_packed_ref(git_dir: &Path, refname: &str) -> Result<Option<ObjectId>> {
    let packed = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(None),
        Err(e) => return Err(Error::Io(e)),
    };
    for line in content.lines() {
        // Skip the header comment and '^' peel lines.
        if line.starts_with('#') || line.starts_with('^') {
            continue;
        }
        let mut fields = line.splitn(2, ' ');
        let hash = fields.next().unwrap_or("");
        let name = fields.next().map(str::trim).unwrap_or("");
        if hash.len() == 40 && name == refname {
            let oid: ObjectId = hash.parse()?;
            return Ok(Some(oid));
        }
    }
    Ok(None)
}
/// Write `refname` as a symbolic ref pointing at `target`, refusing names
/// that D/F-conflict with packed-refs entries. Uses write-to-lock + rename
/// for atomic replacement in the files backend.
pub fn write_symbolic_ref(git_dir: &Path, refname: &str, target: &str) -> Result<()> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_write_symref(git_dir, refname, target, None, None);
    }
    let storage_dir = ref_storage_dir(git_dir, refname);
    if packed_ref_namespace_conflict(&storage_dir, refname)? {
        return Err(Error::InvalidRef(format!(
            "cannot update ref '{refname}': reference namespace conflict"
        )));
    }
    let path = storage_dir.join(crate::ref_namespace::storage_ref_name(refname));
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let lock = lock_path_for_ref(&path);
    fs::write(&lock, format!("ref: {target}\n"))?;
    fs::rename(&lock, &path)?;
    Ok(())
}
/// Write `oid` to `refname` as a loose direct ref, refusing names that
/// D/F-conflict with packed-refs entries. Uses write-to-lock + rename for
/// atomic replacement in the files backend.
pub fn write_ref(git_dir: &Path, refname: &str, oid: &ObjectId) -> Result<()> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_write_ref(git_dir, refname, oid, None, None);
    }
    let storage_dir = ref_storage_dir(git_dir, refname);
    if packed_ref_namespace_conflict(&storage_dir, refname)? {
        return Err(Error::InvalidRef(format!(
            "cannot update ref '{refname}': reference namespace conflict"
        )));
    }
    let path = storage_dir.join(crate::ref_namespace::storage_ref_name(refname));
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let lock = lock_path_for_ref(&path);
    fs::write(&lock, format!("{oid}\n"))?;
    fs::rename(&lock, &path)?;
    Ok(())
}
/// Delete `refname`: remove the loose file, drop any packed-refs entry, and
/// (for refs outside refs/heads/) remove its reflog plus now-empty log
/// directories.
pub fn delete_ref(git_dir: &Path, refname: &str) -> Result<()> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_delete_ref(git_dir, refname);
    }
    let storage_dir = ref_storage_dir(git_dir, refname);
    let stor = crate::ref_namespace::storage_ref_name(refname);
    let path = storage_dir.join(&stor);
    // A missing loose file is fine: the ref may exist only in packed-refs.
    match fs::remove_file(&path) {
        Ok(()) => {}
        Err(e) if e.kind() == io::ErrorKind::NotFound => {}
        Err(e) => return Err(Error::Io(e)),
    }
    remove_packed_ref(&storage_dir, &stor)?;
    let log_path = storage_dir.join("logs").join(&stor);
    // NOTE(review): reflogs for refs/heads/ branches are deliberately left
    // in place here — confirm this matches the intended deletion semantics.
    if !refname.starts_with("refs/heads/") {
        let _ = fs::remove_file(&log_path);
        // Prune empty parent directories, but only while still strictly
        // inside logs/refs/heads, and never that directory itself.
        let logs_heads = storage_dir.join("logs/refs/heads");
        let mut parent = log_path.parent();
        while let Some(p) = parent {
            if p == logs_heads.as_path() || !p.starts_with(&logs_heads) {
                break;
            }
            // remove_dir fails on non-empty dirs, which ends the pruning.
            if fs::remove_dir(p).is_err() {
                break;
            }
            parent = p.parent();
        }
    }
    Ok(())
}
/// Rewrite the packed-refs file without `refname` (and its `^` peel line).
/// No-op when the file is absent or the ref is not listed.
fn remove_packed_ref(git_dir: &Path, refname: &str) -> Result<()> {
    let packed_path = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed_path) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(()),
        Err(e) => return Err(Error::Io(e)),
    };
    let mut out = String::new();
    // When true, the next '^' line belongs to the entry just removed and
    // must be dropped with it.
    let mut skip_peeled = false;
    let mut changed = false;
    let mut header_written = false;
    for line in content.lines() {
        if skip_peeled {
            if line.starts_with('^') {
                changed = true;
                continue;
            }
            skip_peeled = false;
        }
        // Original header comments are dropped; a normalized header is
        // prepended once the first surviving entry is seen (below).
        if line.starts_with('#') {
            continue;
        }
        if line.starts_with('^') {
            out.push_str(line);
            out.push('\n');
            continue;
        }
        if !header_written {
            out.insert_str(0, "# pack-refs with: peeled fully-peeled sorted\n");
            header_written = true;
        }
        let mut parts = line.splitn(2, ' ');
        let _hash = parts.next().unwrap_or("");
        let name = parts.next().unwrap_or("").trim();
        if name == refname {
            changed = true;
            skip_peeled = true;
            continue;
        }
        out.push_str(line);
        out.push('\n');
    }
    // Only rewrite when something was actually removed; write to a fresh
    // ".new" file and rename over the original for atomic replacement.
    if changed {
        let lock = packed_path.with_extension("new");
        let mut file = std::fs::OpenOptions::new()
            .write(true)
            .create_new(true)
            .open(&lock)
            .map_err(Error::Io)?;
        use std::io::Write as _;
        file.write_all(out.as_bytes()).map_err(Error::Io)?;
        drop(file);
        fs::rename(&lock, &packed_path).map_err(Error::Io)?;
    }
    Ok(())
}
/// Read HEAD and return its symbolic target, or `None` when HEAD is
/// detached (points directly at an object).
pub fn read_head(git_dir: &Path) -> Result<Option<String>> {
    let head = read_ref_file(&git_dir.join("HEAD"))?;
    Ok(match head {
        Ref::Symbolic(target) => Some(target),
        Ref::Direct(_) => None,
    })
}
/// Return the target of `refname` when it is a symbolic ref, or `None`
/// when it is a direct ref or does not exist.
pub fn read_symbolic_ref(git_dir: &Path, refname: &str) -> Result<Option<String>> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_read_symbolic_ref(git_dir, refname);
    }
    // Same candidate-name order as resolve_ref_depth: namespaced storage
    // name first, plain name as a fallback.
    let storage_owned = crate::ref_namespace::storage_ref_name(refname);
    let try_names: Vec<&str> =
        if refname == "HEAD" && crate::ref_namespace::ref_storage_prefix().is_some() {
            vec![storage_owned.as_str()]
        } else if storage_owned != refname {
            vec![storage_owned.as_str(), refname]
        } else {
            vec![refname]
        };
    for name in try_names {
        let path = git_dir.join(name);
        match read_ref_file(&path) {
            Ok(Ref::Symbolic(target)) => return Ok(Some(target)),
            Ok(Ref::Direct(_)) => return Ok(None),
            // Missing-or-wrong-path-shape errors fall through to the
            // common-dir lookup below.
            Err(Error::Io(ref e))
                if e.kind() == io::ErrorKind::NotFound
                    || e.kind() == io::ErrorKind::NotADirectory
                    || e.kind() == io::ErrorKind::IsADirectory => {}
            Err(e) => return Err(e),
        }
        // Notes-merge state refs are strictly per-worktree: no fallback.
        if !notes_merge_state_ref(name) {
            if let Some(common) = common_dir(git_dir) {
                if common != git_dir {
                    let cpath = common.join(name);
                    match read_ref_file(&cpath) {
                        Ok(Ref::Symbolic(target)) => return Ok(Some(target)),
                        Ok(Ref::Direct(_)) => return Ok(None),
                        Err(Error::Io(ref e)) if e.kind() == io::ErrorKind::NotFound => {}
                        Err(Error::Io(ref e)) if e.kind() == io::ErrorKind::NotADirectory => {}
                        Err(Error::Io(ref e)) if e.kind() == io::ErrorKind::IsADirectory => {}
                        Err(e) => return Err(e),
                    }
                }
            }
        }
    }
    Ok(None)
}
/// Parsed value of the `core.logAllRefUpdates` config key.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LogRefsConfig {
    /// Key absent (or carrying an unrecognized value).
    Unset,
    /// Reflogs disabled.
    None,
    /// Reflogs for HEAD, branches, remotes, and notes refs.
    Normal,
    /// Reflogs for every ref.
    Always,
}
/// Read `core.logAllRefUpdates` from the repo config using a minimal
/// INI-style scan (no include handling, no quoting, no subsections).
pub fn read_log_refs_config(git_dir: &Path) -> LogRefsConfig {
    // The config file lives in the common dir for linked worktrees.
    let config_dir = common_dir(git_dir).unwrap_or_else(|| git_dir.to_path_buf());
    let config_path = config_dir.join("config");
    let content = match fs::read_to_string(config_path) {
        Ok(c) => c,
        Err(_) => return LogRefsConfig::Unset,
    };
    let mut in_core = false;
    for line in content.lines() {
        let trimmed = line.trim();
        if trimmed.starts_with('[') {
            // Track whether we are inside a [core] section header.
            in_core = trimmed.to_ascii_lowercase().starts_with("[core]");
            continue;
        }
        if !in_core {
            continue;
        }
        let Some((key, value)) = trimmed.split_once('=') else {
            continue;
        };
        if !key.trim().eq_ignore_ascii_case("logallrefupdates") {
            continue;
        }
        let v = value.trim();
        let lower = v.to_ascii_lowercase();
        // First matching key wins; unrecognized values read as Unset.
        return match lower.as_str() {
            "always" => LogRefsConfig::Always,
            "1" | "true" | "yes" | "on" => LogRefsConfig::Normal,
            "0" | "false" | "no" | "off" | "never" => LogRefsConfig::None,
            _ => LogRefsConfig::Unset,
        };
    }
    LogRefsConfig::Unset
}
/// Read `core.bare` from the repo config with the same minimal INI-style
/// scan as [`read_log_refs_config`]. Missing file or key reads as false.
fn read_core_bare(git_dir: &Path) -> bool {
    let config_dir = common_dir(git_dir).unwrap_or_else(|| git_dir.to_path_buf());
    let Ok(content) = fs::read_to_string(config_dir.join("config")) else {
        return false;
    };
    let mut in_core = false;
    for raw_line in content.lines() {
        let line = raw_line.trim();
        if line.starts_with('[') {
            // Section header: track whether we entered [core].
            in_core = line.to_ascii_lowercase().starts_with("[core]");
        } else if in_core {
            if let Some((key, value)) = line.split_once('=') {
                if key.trim().eq_ignore_ascii_case("bare") {
                    // First "bare" key inside [core] wins.
                    return matches!(
                        value.trim().to_ascii_lowercase().as_str(),
                        "1" | "true" | "yes" | "on"
                    );
                }
            }
        }
    }
    false
}
/// Effective `core.logAllRefUpdates` setting: an explicit value wins;
/// otherwise the default is `None` for bare repositories and `Normal` for
/// non-bare ones.
pub fn effective_log_refs_config(git_dir: &Path) -> LogRefsConfig {
    let configured = read_log_refs_config(git_dir);
    if configured != LogRefsConfig::Unset {
        return configured;
    }
    if read_core_bare(git_dir) {
        LogRefsConfig::None
    } else {
        LogRefsConfig::Normal
    }
}
/// Whether a reflog should be auto-created for `refname` under the given
/// `core.logAllRefUpdates` mode.
#[must_use]
pub fn should_autocreate_reflog_for_mode(refname: &str, mode: LogRefsConfig) -> bool {
    match mode {
        LogRefsConfig::None | LogRefsConfig::Unset => false,
        LogRefsConfig::Always => true,
        LogRefsConfig::Normal => {
            // Normal mode logs HEAD plus the standard ref hierarchies.
            const LOGGED_PREFIXES: [&str; 3] =
                ["refs/heads/", "refs/remotes/", "refs/notes/"];
            refname == "HEAD" || LOGGED_PREFIXES.iter().any(|p| refname.starts_with(p))
        }
    }
}
/// Whether a reflog should be auto-created for `refname` under this
/// repository's effective `core.logAllRefUpdates` setting.
#[must_use]
pub fn should_autocreate_reflog(git_dir: &Path, refname: &str) -> bool {
    should_autocreate_reflog_for_mode(refname, effective_log_refs_config(git_dir))
}
/// Append one reflog line for `refname`.
///
/// In the files backend a line is written when `force_create` is set, when
/// the ref qualifies for reflog autocreation, when `message` is non-empty,
/// or when the log file already exists; otherwise the call is a silent
/// no-op.
pub fn append_reflog(
    git_dir: &Path,
    refname: &str,
    old_oid: &ObjectId,
    new_oid: &ObjectId,
    identity: &str,
    message: &str,
    force_create: bool,
) -> Result<()> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_append_reflog(
            git_dir,
            refname,
            old_oid,
            new_oid,
            identity,
            message,
            force_create,
        );
    }
    // Logs live under <storage dir>/logs/<storage ref name>.
    let storage_dir = ref_storage_dir(git_dir, refname);
    let stor = crate::ref_namespace::storage_ref_name(refname);
    let log_path = storage_dir.join("logs").join(&stor);
    // NOTE(review): any non-empty message forces log creation here —
    // confirm that is the intended autocreation rule.
    let may_write =
        force_create || should_autocreate_reflog(git_dir, refname) || !message.is_empty();
    if !may_write && !log_path.exists() {
        return Ok(());
    }
    if let Some(parent) = log_path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Line format: "<old> <new> <identity>\t<message>"; the tab and message
    // are omitted when there is no message.
    let line = if message.is_empty() {
        format!("{old_oid} {new_oid} {identity}\n")
    } else {
        format!("{old_oid} {new_oid} {identity}\t{message}\n")
    };
    let mut file = fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)?;
    use io::Write;
    file.write_all(line.as_bytes())?;
    Ok(())
}
#[must_use]
pub fn reflog_file_path(git_dir: &Path, refname: &str) -> PathBuf {
ref_storage_dir(git_dir, refname).join("logs").join(refname)
}
/// Directory that stores `refname` in the files backend: the worktree's own
/// git dir for per-worktree refs (HEAD, notes-merge state, refs/worktree/),
/// otherwise the shared common dir when one exists.
fn ref_storage_dir(git_dir: &Path, refname: &str) -> PathBuf {
    let per_worktree = matches!(refname, "HEAD" | "NOTES_MERGE_PARTIAL" | "NOTES_MERGE_REF")
        || refname.starts_with("refs/worktree/");
    if per_worktree {
        git_dir.to_path_buf()
    } else {
        common_dir(git_dir).unwrap_or_else(|| git_dir.to_path_buf())
    }
}
/// Normalize a `list_refs` prefix: empty and slash-terminated prefixes pass
/// through; a bare name is kept as-is when it names an existing loose ref
/// file, otherwise it is treated as a directory and a '/' is appended.
fn normalize_list_refs_prefix(git_dir: &Path, prefix: &str) -> String {
    if prefix.is_empty() || prefix.ends_with('/') {
        return prefix.to_owned();
    }
    if ref_storage_dir(git_dir, prefix).join(prefix).is_file() {
        prefix.to_owned()
    } else {
        format!("{prefix}/")
    }
}
/// List resolved refs whose logical names start with `prefix`, merged from
/// packed-refs and loose files in both the common dir and this git dir.
///
/// Results are deduplicated by name and sorted. Later insertions win in the
/// map, so loose entries override packed ones and this git dir's entries
/// override the common dir's.
pub fn list_refs(git_dir: &Path, prefix: &str) -> Result<Vec<(String, ObjectId)>> {
    let prefix_norm = normalize_list_refs_prefix(git_dir, prefix);
    let prefix = prefix_norm.as_str();
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_list_refs(git_dir, prefix);
    }
    let mut by_name: HashMap<String, ObjectId> = HashMap::new();
    // With a ref namespace configured, "refs/..." prefixes are rewritten to
    // their storage location; explicit "refs/namespaces/..." prefixes and
    // non-refs prefixes are taken verbatim.
    let stored_prefixes: Vec<String> = if let Some(ns) = crate::ref_namespace::ref_storage_prefix()
    {
        if prefix.starts_with("refs/namespaces/") {
            vec![prefix.to_owned()]
        } else if prefix.starts_with("refs/") {
            vec![format!("{ns}{prefix}")]
        } else {
            vec![prefix.to_owned()]
        }
    } else {
        vec![prefix.to_owned()]
    };
    for stored_prefix in stored_prefixes {
        // Common dir first so this git dir's entries can overwrite them.
        if let Some(cdir) = common_dir(git_dir) {
            if cdir != git_dir {
                collect_packed_refs_into_map(&cdir, &stored_prefix, false, &mut by_name)?;
                let cbase = cdir.join(&stored_prefix);
                collect_loose_refs_into_map(&cbase, &stored_prefix, &cdir, false, &mut by_name)?;
            }
        }
        collect_packed_refs_into_map(git_dir, &stored_prefix, false, &mut by_name)?;
        let base = git_dir.join(&stored_prefix);
        collect_loose_refs_into_map(&base, &stored_prefix, git_dir, false, &mut by_name)?;
    }
    let mut results: Vec<(String, ObjectId)> = by_name.into_iter().collect();
    results.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(results)
}
/// Like [`list_refs`] but without prefix normalization or namespace
/// translation: keys are the on-disk (physical) ref names, while values are
/// still fully resolved object ids.
pub fn list_refs_physical(git_dir: &Path, prefix: &str) -> Result<Vec<(String, ObjectId)>> {
    if crate::reftable::is_reftable_repo(git_dir) {
        return crate::reftable::reftable_list_refs(git_dir, prefix);
    }
    let mut by_name: HashMap<String, ObjectId> = HashMap::new();
    let stored_prefix = prefix.to_owned();
    // Common dir first so this git dir's entries can overwrite them.
    if let Some(cdir) = common_dir(git_dir) {
        if cdir != git_dir {
            collect_packed_refs_into_map(&cdir, &stored_prefix, true, &mut by_name)?;
            let cbase = cdir.join(&stored_prefix);
            collect_loose_refs_into_map(&cbase, &stored_prefix, &cdir, true, &mut by_name)?;
        }
    }
    collect_packed_refs_into_map(git_dir, &stored_prefix, true, &mut by_name)?;
    let base = git_dir.join(&stored_prefix);
    collect_loose_refs_into_map(&base, &stored_prefix, git_dir, true, &mut by_name)?;
    let mut results: Vec<(String, ObjectId)> = by_name.into_iter().collect();
    results.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(results)
}
/// Gather the object ids advertised by the refs of every alternate object
/// store of `receiving_git_dir`, deduplicated.
///
/// `core.alternateRefsPrefixes` (whitespace-separated prefixes) restricts
/// which refs are considered; by default all of "refs/" is used.
pub fn collect_alternate_ref_oids(receiving_git_dir: &Path) -> Result<Vec<ObjectId>> {
    let config = ConfigSet::load(Some(receiving_git_dir), true)?;
    let objects_dir = receiving_git_dir.join("objects");
    let alternates = pack::read_alternates_recursive(&objects_dir).unwrap_or_default();
    let mut out = Vec::new();
    let mut seen = std::collections::HashSet::new();
    for alt_objects in alternates {
        // The alternate's git dir is the parent of its objects dir.
        let Some(alt_git_dir) = alt_objects.parent().map(PathBuf::from) else {
            continue;
        };
        // Skip alternates that don't look like full repositories.
        if !alt_git_dir.join("refs").is_dir() {
            continue;
        }
        if let Some(prefixes) = config.get("core.alternateRefsPrefixes") {
            for part in prefixes.split_whitespace() {
                for (_, oid) in list_refs(&alt_git_dir, part)? {
                    if seen.insert(oid) {
                        out.push(oid);
                    }
                }
            }
        } else {
            for (_, oid) in list_refs(&alt_git_dir, "refs/")? {
                if seen.insert(oid) {
                    out.push(oid);
                }
            }
        }
    }
    Ok(out)
}
/// List refs matching a glob `pattern`.
///
/// A literal listing prefix is derived from the pattern (everything up to
/// the last '/' before the first glob metacharacter, or the whole pattern
/// treated as a directory when it has no metacharacters), then candidates
/// are filtered through [`ref_matches_glob`].
pub fn list_refs_glob(git_dir: &Path, pattern: &str) -> Result<Vec<(String, ObjectId)>> {
    let prefix: String = match pattern.find(['*', '?', '[']) {
        Some(pos) => pattern[..pos]
            .rfind('/')
            .map_or_else(String::new, |slash| pattern[..=slash].to_owned()),
        None => {
            let trimmed = pattern.trim_end_matches('/');
            if trimmed.is_empty() {
                String::new()
            } else {
                format!("{trimmed}/")
            }
        }
    };
    let candidates = list_refs(git_dir, &prefix)?;
    Ok(candidates
        .into_iter()
        .filter(|(name, _)| ref_matches_glob(name, pattern))
        .collect())
}
/// True when `refname` matches `pattern`.
///
/// A pattern without glob metacharacters matches exactly, as a trailing
/// path component ("main" matches "refs/heads/main"), or as a leading
/// namespace ("refs/heads" matches "refs/heads/main"); otherwise the
/// pattern is evaluated as a glob.
pub fn ref_matches_glob(refname: &str, pattern: &str) -> bool {
    if !pattern.contains('*') && !pattern.contains('?') && !pattern.contains('[') {
        // Boundary checks via strip_suffix/strip_prefix instead of building
        // temporary "/{pattern}" / "{pattern}/" strings — same semantics,
        // no allocation.
        return refname == pattern
            || refname
                .strip_suffix(pattern)
                .is_some_and(|rest| rest.ends_with('/'))
            || refname
                .strip_prefix(pattern)
                .is_some_and(|rest| rest.starts_with('/'));
    }
    glob_match(pattern, refname)
}
/// Byte-wise glob match supporting `*` (any run of bytes, including none)
/// and `?` (exactly one byte), using the classic backtracking scan.
///
/// NOTE(review): `[...]` character classes are not implemented and match
/// literally, and `*` also crosses '/' — confirm both are intended.
fn glob_match(pattern: &str, text: &str) -> bool {
    let p = pattern.as_bytes();
    let t = text.as_bytes();
    let mut p_idx = 0usize;
    let mut t_idx = 0usize;
    // Backtracking state: the last '*' position and the text index it is
    // currently matched up to.
    let mut backtrack: Option<(usize, usize)> = None;
    while t_idx < t.len() {
        let literal_ok = p_idx < p.len() && (p[p_idx] == b'?' || p[p_idx] == t[t_idx]);
        if literal_ok {
            p_idx += 1;
            t_idx += 1;
        } else if p_idx < p.len() && p[p_idx] == b'*' {
            backtrack = Some((p_idx, t_idx));
            p_idx += 1;
        } else if let Some((star_p, star_t)) = backtrack {
            // Let the last '*' swallow one more text byte and retry.
            let widened = star_t + 1;
            backtrack = Some((star_p, widened));
            p_idx = star_p + 1;
            t_idx = widened;
        } else {
            return false;
        }
    }
    // Trailing '*'s in the pattern may match the empty string.
    while p_idx < p.len() && p[p_idx] == b'*' {
        p_idx += 1;
    }
    p_idx == p.len()
}
/// Parse a loose ref file only if it contains a direct 40-hex object id;
/// symbolic refs, unreadable files, and malformed content yield `None`.
fn loose_ref_file_direct_oid(path: &Path) -> Option<ObjectId> {
    let raw = fs::read_to_string(path).ok()?;
    let trimmed = raw.trim_end_matches('\n').trim();
    let is_hex40 = trimmed.len() == 40 && trimmed.bytes().all(|b| b.is_ascii_hexdigit());
    if is_hex40 {
        trimmed.parse().ok()
    } else {
        None
    }
}
/// Recursively walk the loose-ref directory `dir`, inserting every ref
/// found into `out`.
///
/// `prefix` is the ref-name prefix corresponding to `dir` (slash-terminated
/// on recursion). With `physical_keys` set, entries are keyed by storage
/// name; otherwise storage names are mapped back to logical names and the
/// refs are fully resolved via `resolve_git_dir`.
fn collect_loose_refs_into_map(
    dir: &Path,
    prefix: &str,
    resolve_git_dir: &Path,
    physical_keys: bool,
    out: &mut HashMap<String, ObjectId>,
) -> Result<()> {
    let read = match fs::read_dir(dir) {
        Ok(r) => r,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(()),
        Err(e) => return Err(Error::Io(e)),
    };
    for entry in read {
        let entry = entry?;
        let name = entry.file_name();
        let name_str = name.to_string_lossy();
        let refname = format!("{prefix}{name_str}");
        let path = entry.path();
        // Unreadable entries are skipped rather than failing the listing.
        let meta = match fs::metadata(&path) {
            Ok(m) => m,
            Err(_) => continue,
        };
        if meta.is_dir() {
            collect_loose_refs_into_map(
                &path,
                &format!("{refname}/"),
                resolve_git_dir,
                physical_keys,
                out,
            )?;
        } else if meta.is_file() {
            if physical_keys {
                // Physical mode: direct oids are read straight from the
                // file; symbolic refs are resolved to their final oid.
                if let Some(oid) = loose_ref_file_direct_oid(&path) {
                    out.insert(refname, oid);
                } else if let Ok(Ref::Symbolic(target)) = read_ref_file(&path) {
                    if let Ok(oid) = resolve_ref(resolve_git_dir, target.trim()) {
                        out.insert(refname, oid);
                    }
                }
            } else {
                // Logical mode: map the storage name back to its logical
                // name and fully resolve it; unresolvable refs are dropped.
                let logical = crate::ref_namespace::logical_ref_name_from_storage(&refname)
                    .unwrap_or_else(|| refname.clone());
                if let Ok(oid) = resolve_ref(resolve_git_dir, &logical) {
                    out.insert(logical, oid);
                }
            }
        }
    }
    Ok(())
}
/// Resolve an `@{-N}` revision: the branch checked out N checkouts ago,
/// found by scanning HEAD's reflog (newest first) for
/// "checkout: moving from X to Y" entries. Returns the source branch X.
pub fn resolve_at_n_branch(git_dir: &Path, spec: &str) -> Result<String> {
    let inner = spec
        .strip_prefix("@{-")
        .and_then(|s| s.strip_suffix('}'))
        .ok_or_else(|| Error::InvalidRef(format!("not an @{{-N}} ref: {spec}")))?;
    let n: usize = inner
        .parse()
        .map_err(|_| Error::InvalidRef(format!("invalid N in {spec}")))?;
    if n == 0 {
        return Err(Error::InvalidRef("@{-0} is not valid".to_string()));
    }
    let entries = crate::reflog::read_reflog(git_dir, "HEAD")?;
    let mut count = 0usize;
    for entry in entries.iter().rev() {
        let msg = &entry.message;
        if let Some(rest) = msg.strip_prefix("checkout: moving from ") {
            count += 1;
            if count == n {
                // NOTE(review): if the Nth checkout entry lacks " to ", the
                // scan keeps counting past it — confirm that fall-through
                // is intended rather than an immediate error.
                if let Some(to_pos) = rest.find(" to ") {
                    return Ok(rest[..to_pos].to_string());
                }
            }
        }
    }
    Err(Error::InvalidRef(format!(
        "{spec}: only {count} checkout(s) in reflog"
    )))
}
/// True when a packed-refs entry name falls under a listing `prefix`.
///
/// Besides a plain prefix match, a slash-terminated prefix also matches the
/// exact ref named by the prefix without its trailing slash(es), so listing
/// "refs/x/" finds a ref stored literally as "refs/x".
fn ref_name_matches_list_prefix(refname: &str, prefix: &str) -> bool {
    refname.starts_with(prefix)
        || (prefix.ends_with('/') && refname == prefix.trim_end_matches('/'))
}
/// Insert every packed-refs entry matching `prefix` into `out`, keyed by
/// physical (storage) or logical ref name depending on `physical_keys`.
fn collect_packed_refs_into_map(
    git_dir: &Path,
    prefix: &str,
    physical_keys: bool,
    out: &mut HashMap<String, ObjectId>,
) -> Result<()> {
    let packed_path = git_dir.join("packed-refs");
    let content = match fs::read_to_string(&packed_path) {
        Ok(c) => c,
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(()),
        Err(e) => return Err(Error::Io(e)),
    };
    for line in content.lines() {
        // Skip the header comment, '^' peel lines, and blanks.
        if line.starts_with('#') || line.starts_with('^') || line.is_empty() {
            continue;
        }
        let mut parts = line.splitn(2, ' ');
        let hash = parts.next().unwrap_or("");
        let refname = parts.next().unwrap_or("").trim();
        if !ref_name_matches_list_prefix(refname, prefix) || hash.len() != 40 {
            continue;
        }
        let oid: ObjectId = hash.parse()?;
        let key = if physical_keys {
            refname.to_owned()
        } else {
            crate::ref_namespace::logical_ref_name_from_storage(refname)
                .unwrap_or_else(|| refname.to_owned())
        };
        out.insert(key, oid);
    }
    Ok(())
}
// Tests for directory/file conflict detection when creating refs, plus the
// raw lookups that detection relies on.
#[cfg(test)]
mod refname_available_tests {
    use super::*;
    use std::collections::{BTreeSet, HashSet};
    use tempfile::tempdir;

    // An existing loose ref must block creating any name nested under it.
    #[test]
    fn loose_parent_blocks_child_create() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/1l")).unwrap();
        fs::write(
            git_dir.join("refs/1l/c"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/1l/c").unwrap(),
            RawRefLookup::Exists
        );
        let extras = BTreeSet::from([
            "refs/1l/b".to_string(),
            "refs/1l/c/x".to_string(),
            "refs/1l/d".to_string(),
        ]);
        let skip = HashSet::new();
        let err = verify_refname_available_for_create(git_dir, "refs/1l/c/x", &extras, &skip)
            .unwrap_err();
        assert!(matches!(
            err,
            RefnameUnavailable::AncestorExists { ref blocking, .. } if blocking == "refs/1l/c"
        ));
    }

    // The ancestor check must still find the loose ref after git_dir has
    // been canonicalized internally (non-canonical ".git" path given).
    #[test]
    fn verify_sees_loose_ref_after_canonical_git_dir() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path().join(".git");
        fs::create_dir_all(git_dir.join("refs/1l")).unwrap();
        fs::write(
            git_dir.join("refs/1l/c"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        let skip = HashSet::new();
        let extras = BTreeSet::new();
        let err = verify_refname_available_for_create(&git_dir, "refs/1l/c/x", &extras, &skip)
            .unwrap_err();
        assert!(matches!(
            err,
            RefnameUnavailable::AncestorExists { ref blocking, .. } if blocking == "refs/1l/c"
        ));
    }

    // list_refs with a slash-terminated prefix must find refs below it.
    #[test]
    fn list_refs_finds_sibling_under_parent_directory() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/ns/p")).unwrap();
        fs::write(
            git_dir.join("refs/ns/p/x"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        let listed = list_refs(git_dir, "refs/ns/p/").unwrap();
        assert!(
            listed.iter().any(|(n, _)| n == "refs/ns/p/x"),
            "got {listed:?}"
        );
    }

    // Creating a parent name must be blocked by an existing child ref.
    #[test]
    fn verify_blocks_parent_when_child_ref_exists() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/ns/p")).unwrap();
        fs::write(
            git_dir.join("refs/ns/p/x"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        let extras = BTreeSet::from(["refs/ns/p".to_string()]);
        let skip = HashSet::new();
        let err =
            verify_refname_available_for_create(git_dir, "refs/ns/p", &extras, &skip).unwrap_err();
        assert!(matches!(
            err,
            RefnameUnavailable::DescendantExists { ref blocking, .. }
            if blocking == "refs/ns/p/x"
        ));
    }

    // Same descendant conflict with a deeper, git-style nested path.
    #[test]
    fn verify_blocks_parent_git_style_nested_path() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/3l/c")).unwrap();
        fs::write(
            git_dir.join("refs/3l/c/x"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        let extras = BTreeSet::from(["refs/3l/c".to_string()]);
        let skip = HashSet::new();
        let err =
            verify_refname_available_for_create(git_dir, "refs/3l/c", &extras, &skip).unwrap_err();
        assert!(matches!(
            err,
            RefnameUnavailable::DescendantExists { ref blocking, .. }
            if blocking == "refs/3l/c/x"
        ));
    }

    // A plain intermediate directory (reported as IsDirectory) must not
    // prevent creating a new ref nested inside it.
    #[test]
    fn intermediate_directory_does_not_block_nested_create() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/ns")).unwrap();
        fs::write(
            git_dir.join("refs/ns/existing"),
            "67bf698f3ab735e92fb011a99cff3497c44d30c1\n",
        )
        .unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/ns").unwrap(),
            RawRefLookup::IsDirectory
        );
        let extras = BTreeSet::from(["refs/ns/newchild".to_string()]);
        let skip = HashSet::new();
        verify_refname_available_for_create(git_dir, "refs/ns/newchild", &extras, &skip).unwrap();
    }
}
// Tests covering the three RawRefLookup outcomes of read_raw_ref across
// loose files, missing refs, directories, and packed-refs entries.
#[cfg(test)]
mod read_raw_ref_tests {
    use super::*;
    use tempfile::tempdir;

    // A loose ref file yields Exists.
    #[test]
    fn loose_ref_file_is_exists() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/heads")).unwrap();
        fs::write(
            git_dir.join("refs/heads/side"),
            "0000000000000000000000000000000000000000\n",
        )
        .unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/heads/side").unwrap(),
            RawRefLookup::Exists
        );
    }

    // A name with no loose file or packed entry yields NotFound.
    #[test]
    fn missing_ref_is_not_found() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/heads")).unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/heads/nope").unwrap(),
            RawRefLookup::NotFound
        );
    }

    // A directory occupying the ref path yields IsDirectory.
    #[test]
    fn directory_where_ref_expected_is_is_directory() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::create_dir_all(git_dir.join("refs/heads")).unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/heads").unwrap(),
            RawRefLookup::IsDirectory
        );
    }

    // A packed-refs entry with no loose file still yields Exists.
    #[test]
    fn packed_ref_name_is_exists() {
        let dir = tempdir().unwrap();
        let git_dir = dir.path();
        fs::write(
            git_dir.join("packed-refs"),
            "# pack-refs with: peeled fully-peeled \n\
            0000000000000000000000000000000000000000 refs/heads/packed\n",
        )
        .unwrap();
        assert_eq!(
            read_raw_ref(git_dir, "refs/heads/packed").unwrap(),
            RawRefLookup::Exists
        );
    }
}