use std::collections::hash_map;
use std::collections::HashMap;
use std::io::Read;
use bstr::BStr;
use bstr::BString;
use itertools::Itertools as _;
use pollster::FutureExt;
use crate::backend::BackendError;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::conflicts::materialize_merge_result;
use crate::conflicts::materialize_tree_value;
use crate::conflicts::MaterializedTreeValue;
use crate::diff::Diff;
use crate::diff::DiffHunkKind;
use crate::fileset::FilesetExpression;
use crate::graph::GraphEdge;
use crate::graph::GraphEdgeType;
use crate::merged_tree::MergedTree;
use crate::repo::Repo;
use crate::repo_path::RepoPath;
use crate::revset::RevsetEvaluationError;
use crate::revset::RevsetExpression;
use crate::revset::RevsetFilterPredicate;
use crate::store::Store;
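/// Annotation of a file's content at the starting commit: for each line, the
/// commit that introduced it.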
#[derive(Clone, Debug)]
pub struct FileAnnotation {
line_map: OriginalLineMap,
text: BString,
}
impl FileAnnotation {
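/// Iterates over `(commit_id, line)` pairs. Lines are split inclusively, so
/// each line keeps its trailing newline if it has one.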
pub fn lines(&self) -> impl Iterator<Item = (&CommitId, &BStr)> {
itertools::zip_eq(&self.line_map, self.text.split_inclusive(|b| *b == b'\n'))
.map(|(commit_id, line)| (commit_id.as_ref().unwrap(), line.as_ref()))
}
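/// File content at the starting commit.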
pub fn text(&self) -> &BStr {
self.text.as_ref()
}
}
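/// Commits that still have lines to attribute, mapped to their pending source
/// state.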
type CommitSourceMap = HashMap<CommitId, Source>;
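/// File content at one commit, plus the mapping from its line numbers back to
/// line numbers in the original (starting) file.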
#[derive(Clone, Debug)]
struct Source {
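/// Sorted `(line number in this commit's file, line number in the original
/// file)` pairs for lines that still need to be attributed.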
line_map: Vec<(usize, usize)>,
text: BString,
}
impl Source {
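/// Reads the file content at `commit`; the line map starts out empty.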
fn load(commit: &Commit, file_path: &RepoPath) -> Result<Self, BackendError> {
let tree = commit.tree()?;
let text = get_file_contents(commit.store(), file_path, &tree)?;
Ok(Source {
line_map: Vec::new(),
text: text.into(),
})
}
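/// Initializes the line map to the identity mapping (used for the starting
/// commit, where every line maps to itself).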
fn fill_line_map(&mut self) {
let lines = self.text.split_inclusive(|b| *b == b'\n');
self.line_map = lines.enumerate().map(|(i, _)| (i, i)).collect();
}
}
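/// For each line in the original file, the commit that introduced it (`None`
/// until the line has been attributed).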
type OriginalLineMap = Vec<Option<CommitId>>;
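/// Computes line-by-line annotation ("blame") of `file_path` as it exists in
/// `starting_commit`, walking ancestors that touched the file.
///
/// A minimal usage sketch (assumes `repo`, `commit`, and `path` are obtained
/// from the caller's context, e.g. a loaded workspace):
///
/// ```ignore
/// let annotation = get_annotation_for_file(repo, &commit, path)?;
/// for (commit_id, line) in annotation.lines() {
///     print!("{} {}", commit_id.hex(), line);
/// }
/// ```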
pub fn get_annotation_for_file(
repo: &dyn Repo,
starting_commit: &Commit,
file_path: &RepoPath,
) -> Result<FileAnnotation, RevsetEvaluationError> {
let mut source = Source::load(starting_commit, file_path)?;
source.fill_line_map();
let text = source.text.clone();
let line_map = process_commits(repo, starting_commit.id(), source, file_path)?;
Ok(FileAnnotation { line_map, text })
}
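/// Walks the ancestry graph of `starting_commit_id`, restricted to commits
/// that touch `file_name`, and attributes each original line to the commit
/// that introduced it. Stops early once every line has been attributed.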
fn process_commits(
repo: &dyn Repo,
starting_commit_id: &CommitId,
starting_source: Source,
file_name: &RepoPath,
) -> Result<OriginalLineMap, RevsetEvaluationError> {
let predicate = RevsetFilterPredicate::File(FilesetExpression::file_path(file_name.to_owned()));
let revset = RevsetExpression::commit(starting_commit_id.clone())
.union(
&RevsetExpression::commit(starting_commit_id.clone())
.ancestors()
.filtered(predicate),
)
.evaluate_programmatic(repo)
.map_err(|e| e.expect_backend_error())?;
let mut original_line_map = vec![None; starting_source.line_map.len()];
let mut commit_source_map = HashMap::from([(starting_commit_id.clone(), starting_source)]);
for node in revset.iter_graph() {
let (commit_id, edge_list) = node?;
process_commit(
repo,
file_name,
&mut original_line_map,
&mut commit_source_map,
&commit_id,
&edge_list,
)?;
if commit_source_map.is_empty() {
break;
}
}
Ok(original_line_map)
}
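/// Processes a single commit: lines of its file content that also appear in a
/// parent's version are handed off to that parent for further attribution;
/// any lines left over are attributed to this commit in `original_line_map`.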
fn process_commit(
repo: &dyn Repo,
file_name: &RepoPath,
original_line_map: &mut OriginalLineMap,
commit_source_map: &mut CommitSourceMap,
current_commit_id: &CommitId,
edges: &[GraphEdge<CommitId>],
) -> Result<(), BackendError> {
let Some(mut current_source) = commit_source_map.remove(current_commit_id) else {
return Ok(());
};
for parent_edge in edges {
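// An edge leading outside the evaluated set has nothing to attribute; lines
// only reachable through it will stay with the current commit.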
if parent_edge.edge_type == GraphEdgeType::Missing {
continue;
}
let parent_commit_id = &parent_edge.target;
let parent_source = match commit_source_map.entry(parent_commit_id.clone()) {
hash_map::Entry::Occupied(entry) => entry.into_mut(),
hash_map::Entry::Vacant(entry) => {
let commit = repo.store().get_commit(entry.key())?;
entry.insert(Source::load(&commit, file_name)?)
}
};
let mut current_lines = current_source.line_map.iter().copied().peekable();
let mut new_current_line_map = Vec::new();
let mut new_parent_line_map = Vec::new();
copy_same_lines_with(
&current_source.text,
&parent_source.text,
|current_start, parent_start, count| {
new_current_line_map
.extend(current_lines.peeking_take_while(|&(cur, _)| cur < current_start));
while let Some((current, original)) =
current_lines.next_if(|&(cur, _)| cur < current_start + count)
{
let parent = parent_start + (current - current_start);
new_parent_line_map.push((parent, original));
}
},
);
new_current_line_map.extend(current_lines);
current_source.line_map = new_current_line_map;
parent_source.line_map = if parent_source.line_map.is_empty() {
new_parent_line_map
} else {
itertools::merge(parent_source.line_map.iter().copied(), new_parent_line_map).collect()
};
if parent_source.line_map.is_empty() {
commit_source_map.remove(parent_commit_id);
}
}
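// Any lines not matched by a parent are attributed to this commit.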
for (_, original_line_number) in current_source.line_map {
original_line_map[original_line_number] = Some(current_commit_id.clone());
}
Ok(())
}
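/// Diffs the two file contents line by line and calls
/// `copy(current_start, parent_start, count)` for each run of matching lines,
/// where the starts are 0-based line numbers in the respective versions.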
fn copy_same_lines_with(
current_contents: &[u8],
parent_contents: &[u8],
mut copy: impl FnMut(usize, usize, usize),
) {
let diff = Diff::by_line([current_contents, parent_contents]);
let mut current_line_counter: usize = 0;
let mut parent_line_counter: usize = 0;
for hunk in diff.hunks() {
match hunk.kind {
DiffHunkKind::Matching => {
let count = hunk.contents[0].split_inclusive(|b| *b == b'\n').count();
copy(current_line_counter, parent_line_counter, count);
current_line_counter += count;
parent_line_counter += count;
}
DiffHunkKind::Different => {
let current_output = hunk.contents[0];
let parent_output = hunk.contents[1];
current_line_counter += current_output.split_inclusive(|b| *b == b'\n').count();
parent_line_counter += parent_output.split_inclusive(|b| *b == b'\n').count();
}
}
}
}
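/// Reads the content of `path` from `tree`. Conflicted files are materialized
/// with conflict markers; absent or non-file entries yield empty content.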
fn get_file_contents(
store: &Store,
path: &RepoPath,
tree: &MergedTree,
) -> Result<Vec<u8>, BackendError> {
let file_value = tree.path_value(path)?;
let effective_file_value = materialize_tree_value(store, path, file_value).block_on()?;
match effective_file_value {
MaterializedTreeValue::File { mut reader, id, .. } => {
let mut file_contents = Vec::new();
reader
.read_to_end(&mut file_contents)
.map_err(|e| BackendError::ReadFile {
path: path.to_owned(),
id,
source: Box::new(e),
})?;
Ok(file_contents)
}
MaterializedTreeValue::FileConflict { id, contents, .. } => {
let mut materialized_conflict_buffer = Vec::new();
materialize_merge_result(&contents, &mut materialized_conflict_buffer).map_err(
|io_err| BackendError::ReadFile {
path: path.to_owned(),
source: Box::new(io_err),
id: id.first().clone().unwrap(),
},
)?;
Ok(materialized_conflict_buffer)
}
_ => Ok(Vec::new()),
}
}