use std::cmp::{self};
use threadpool::ThreadPool;
use crate::lines_highlighter::LineAcceptance;
use crate::lines_highlighter::{LinesHighlighter, Response};
use crate::refiner::diff;
use crate::string_future::StringFuture;
use crate::token_collector::{
render, Style, StyledToken, LINE_STYLE_NEW_FILENAME, LINE_STYLE_OLD_FILENAME,
};
use crate::hunk_highlighter::HunkLinesHighlighter;
use crate::refiner::Formatter;
/// Highlights the `--- ` / `+++ ` file header lines of a diff, then delegates
/// the hunks that follow to per-hunk sub-highlighters.
pub(crate) struct FileHighlighter {
    // Old file name, from the "--- " line (prefix already stripped)
    old_name: String,
    // New file name from the "+++ " line; empty until that line is consumed
    new_name: String,
    formatter: Formatter,
    // Highlighter for the hunk currently being consumed, if any
    sub_highlighter: Option<Box<dyn LinesHighlighter>>,
    // file:// URL for the new file name, if it resolves to an existing file
    url: Option<url::Url>,
}
/// Strips a trailing timestamp from a diff header file name.
///
/// Diff headers usually separate name and timestamp with a tab; some tools use
/// spaces. We cut at the last tab if there is one, otherwise at the last space
/// (NOTE: with a space separator this also cuts inside a
/// "YYYY-MM-DD HH:MM:SS" timestamp, leaving the date attached — preserved
/// pre-existing behavior). Names without either separator pass through as-is.
fn without_timestamp(name: &str) -> &str {
    match name.rfind('\t').or_else(|| name.rfind(' ')) {
        Some(separator_index) => &name[..separator_index],
        None => name,
    }
}
impl LinesHighlighter for FileHighlighter {
    /// Consumes one line of diff input.
    ///
    /// State machine:
    /// 1. Right after `from_line()` only the `--- ` line has been seen; the
    ///    next line must be the matching `+++ ` line, otherwise error.
    /// 2. After both header lines, lines go to the active hunk highlighter
    ///    (if any); a line that starts a new hunk creates a fresh
    ///    `HunkLinesHighlighter`.
    fn consume_line(&mut self, line: &str, thread_pool: &ThreadPool) -> Result<Response, String> {
        // from_line() always sets old_name, so it can never be empty here
        assert!(!self.old_name.is_empty());

        if self.new_name.is_empty() {
            // We have seen "--- "; this line must be the "+++ " one
            if let Some(new_name) = line.strip_prefix("+++ ") {
                self.new_name.push_str(new_name);
                // Hyperlink target comes from the new name, sans timestamp
                self.url = hyperlink_filename(without_timestamp(new_name));
                return Ok(Response {
                    line_accepted: LineAcceptance::AcceptedWantMore,
                    highlighted: vec![StringFuture::from_string(self.highlighted())],
                });
            }
            return Err("--- was not followed by +++".to_string());
        }

        let mut highlights: Vec<StringFuture> = Vec::new();
        if let Some(ref mut highlighter) = self.sub_highlighter {
            let resp = highlighter.consume_line(line, thread_pool)?;
            highlights = resp.highlighted;
            match resp.line_accepted {
                LineAcceptance::AcceptedWantMore => {
                    return Ok(Response {
                        line_accepted: LineAcceptance::AcceptedWantMore,
                        highlighted: highlights,
                    });
                }
                LineAcceptance::AcceptedDone => {
                    // Hunk complete, but we still want more lines: another
                    // hunk may follow in the same file
                    self.sub_highlighter = None;
                    return Ok(Response {
                        line_accepted: LineAcceptance::AcceptedWantMore,
                        highlighted: highlights,
                    });
                }
                LineAcceptance::RejectedDone => {
                    // Hunk done but this line was not consumed; fall through
                    // and see whether it starts a new hunk
                    self.sub_highlighter = None;
                }
            }
        }

        if let Some(hunk_highlighter) =
            HunkLinesHighlighter::from_line(line, self.formatter.clone(), &self.url)?
        {
            self.sub_highlighter = Some(Box::new(hunk_highlighter));
            return Ok(Response {
                line_accepted: LineAcceptance::AcceptedWantMore,
                highlighted: highlights,
            });
        }

        // Not a header, no active hunk, not a hunk start: not our line
        return Ok(Response {
            line_accepted: LineAcceptance::RejectedDone,
            highlighted: highlights,
        });
    }

    /// Called at end of input. Fails if we were still waiting for the `+++ `
    /// line; otherwise lets any active hunk highlighter finish up.
    fn consume_eof(&mut self, thread_pool: &ThreadPool) -> Result<Vec<StringFuture>, String> {
        if self.new_name.is_empty() {
            return Err("Input ended early, --- should have been followed by +++".to_string());
        }
        if let Some(ref mut sub) = self.sub_highlighter {
            return sub.consume_eof(thread_pool);
        }
        Ok(vec![])
    }
}
impl FileHighlighter {
    /// Creates a `FileHighlighter` if `line` is a `--- ` diff header line,
    /// remembering the old file name. Returns `None` for any other line.
    pub(crate) fn from_line(line: &str, formatter: Formatter) -> Option<Self> {
        // `?` replaces the original starts_with() + strip_prefix().unwrap()
        let old_name = line.strip_prefix("--- ")?;
        Some(FileHighlighter {
            old_name: old_name.to_string(),
            new_name: String::new(),
            formatter,
            sub_highlighter: None,
            url: None,
        })
    }

    /// Renders the highlighted `--- ` / `+++ ` header lines, each followed by
    /// a newline.
    fn highlighted(&self) -> String {
        let (mut old_tokens, mut new_tokens) = diff(&self.old_name, &self.new_name);

        // For created files the whole new name is new; don't diff-highlight
        // it, just mark the line with a bright "NEW " prefix.
        let new_prefix = if self.old_name == "/dev/null" {
            for token in &mut new_tokens {
                token.style = Style::Context;
            }
            Some(StyledToken::new("NEW ".to_string(), Style::Bright))
        } else {
            None
        };

        // Same idea for deleted files, with a "DELETED " prefix on the old name.
        let old_prefix = if self.new_name == "/dev/null" {
            for token in &mut old_tokens {
                token.style = Style::Context;
            }
            Some(StyledToken::new("DELETED ".to_string(), Style::Bright))
        } else {
            None
        };

        // Style paths / filenames / timestamps first, then insert the prefixes
        // so they are not affected by the path decoration.
        decorate_paths(&mut old_tokens, &mut new_tokens);
        if let Some(prefix) = new_prefix {
            new_tokens.insert(0, prefix);
        }
        if let Some(prefix) = old_prefix {
            old_tokens.insert(0, prefix);
        }

        let old_filename = render(&LINE_STYLE_OLD_FILENAME, "--- ", &old_tokens);
        let new_filename = render(&LINE_STYLE_NEW_FILENAME, "+++ ", &new_tokens);

        let mut highlighted = String::new();
        highlighted.push_str(&old_filename);
        highlighted.push('\n');
        highlighted.push_str(&new_filename);
        highlighted.push('\n');
        highlighted
    }
}
/// Turns a diff header file name into a `file://` URL, if the name (with or
/// without a git-style `a/` / `b/` prefix) points at an existing file.
/// `/dev/null` never gets a hyperlink. Returns `None` when nothing matches.
fn hyperlink_filename(filename: &str) -> Option<url::Url> {
    // Try the name as-is first, then with any git prefix stripped
    let mut candidates = vec![filename];
    if let Some(stripped) = filename.strip_prefix("a/") {
        candidates.push(stripped);
    }
    if let Some(stripped) = filename.strip_prefix("b/") {
        candidates.push(stripped);
    }

    for candidate in candidates {
        if candidate == "/dev/null" {
            continue;
        }

        // from_file_path() needs an absolute path, so anchor relative
        // candidates in the current directory
        let mut path = std::path::PathBuf::from(candidate);
        if path.is_relative() {
            if let Ok(cwd) = std::env::current_dir() {
                path = cwd.join(&path);
            }
        }

        if !path.exists() {
            continue;
        }

        // Resolve symlinks etc. where possible; /dev/null may hide behind one
        let canonical = path.canonicalize().unwrap_or(path);
        if canonical == std::path::Path::new("/dev/null") {
            continue;
        }

        if let Ok(url) = url::Url::from_file_path(&canonical) {
            return Some(url);
        }
    }

    None
}
/// Reassembles the file name from path + filename tokens and, if it resolves
/// to a hyperlinkable file, attaches that URL to every one of those tokens.
fn hyperlink_tokenized(just_path: &mut [StyledToken], just_filename: &mut [StyledToken]) {
    let filename: String = just_path
        .iter()
        .chain(just_filename.iter())
        .map(|token| token.token.as_str())
        .collect();

    if let Some(url) = hyperlink_filename(&filename) {
        for token in just_path.iter_mut().chain(just_filename.iter_mut()) {
            token.url = Some(url.clone());
        }
    }
}
/// Lowlights the path + filename tokens when together they spell exactly
/// `/dev/null` (tokenized as "/", "dev", "/" + "null").
fn lowlight_dev_null(just_path: &mut [StyledToken], just_filename: &mut [StyledToken]) {
    // Require the path to be exactly "/", "dev", "/". The previous `< 3`
    // guard also matched longer paths whose first three tokens happened to be
    // "/dev/" (e.g. /dev/shm/null), lowlighting real file names.
    if just_path.len() != 3 {
        return;
    }
    if just_filename.len() != 1 {
        return;
    }

    if just_path[0].token == "/"
        && just_path[1].token == "dev"
        && just_path[2].token == "/"
        && just_filename[0].token == "null"
    {
        for token in just_path {
            token.style = Style::Lowlighted;
        }
        for token in just_filename {
            token.style = Style::Lowlighted;
        }
    }
}
/// Replaces the first tab token of each row with spaces so that whatever
/// follows the tab (the timestamp) starts in the same column on both rows,
/// with at least two spaces of padding. No-op unless both rows contain a tab.
fn align_tabs(old: &mut [StyledToken], new: &mut [StyledToken]) {
    // Number of chars (not bytes) rendered before the token at tab_index
    fn chars_before(tokens: &[StyledToken], tab_index: usize) -> usize {
        tokens[..tab_index]
            .iter()
            .map(|token| token.token.chars().count())
            .sum()
    }

    // Idiomatic pattern match replaces the original is_none() + unwrap()
    let (old_tab_index_token, new_tab_index_token) = match (
        old.iter().position(|token| token.token == "\t"),
        new.iter().position(|token| token.token == "\t"),
    ) {
        (Some(old_index), Some(new_index)) => (old_index, new_index),
        _ => return,
    };

    let old_tab_index_char = chars_before(old, old_tab_index_token);
    let new_tab_index_char = chars_before(new, new_tab_index_token);

    // Both tabs expand to end at the same column: two past the wider prefix
    let target = 2 + cmp::max(old_tab_index_char, new_tab_index_char);
    old[old_tab_index_token].token = " ".repeat(target - old_tab_index_char);
    new[new_tab_index_token].token = " ".repeat(target - new_tab_index_char);
}
/// One diff header row split into its consecutive parts:
/// `[prefix][just_path][just_filename][time_space][timestamp]`.
/// Any part may be an empty slice.
struct SplitRow<'a> {
    // Git-style "a/" or "b/" prefix tokens, if recognized
    prefix: &'a mut [StyledToken],
    // Directory part of the name, up to and including the last separator
    just_path: &'a mut [StyledToken],
    // File name part, after the last separator
    just_filename: &'a mut [StyledToken],
    // The single whitespace token between name and timestamp, if present
    time_space: &'a mut [StyledToken],
    // Everything after the name separator, presumably a timestamp
    timestamp: &'a mut [StyledToken],
}
/// Splits one header row of tokens into prefix / path / filename /
/// separator / timestamp parts. See [`SplitRow`] for the layout.
fn split_row<'a>(look_for_git_prefixes: bool, row: &'a mut [StyledToken]) -> SplitRow<'a> {
    // A git prefix is one single-byte token (expected to be "a" or "b", only
    // the length is checked here) followed by a path separator token
    let path_start = if look_for_git_prefixes
        && row.len() >= 2
        && row[0].token.len() == 1
        && (row[1].token == "/" || row[1].token == std::path::MAIN_SEPARATOR.to_string())
    {
        2
    } else {
        0
    };

    // The name ends at the first tab / all-whitespace token, if any
    let path_end = row
        .iter()
        .position(|token| token.token == "\t" || token.token.chars().all(char::is_whitespace))
        .unwrap_or(row.len());

    // Skip past the whitespace separator; clamp in case it is the last token
    let timestamp_start = cmp::min(path_end + 1, row.len());

    // The last path separator inside the name is where the filename begins
    let last_file_separator_index_from_path_start =
        row[path_start..path_end].iter().rposition(|token| {
            token.token == "/" || token.token == std::path::MAIN_SEPARATOR.to_string()
        });

    // Carve the row into disjoint mutable slices, right to left
    let (row, timestamp) = row.split_at_mut(timestamp_start);
    let (prefix, rest) = row.split_at_mut(path_start);
    let (path_and_filename, space_plus_timestamp) = rest.split_at_mut(path_end - path_start);
    let time_space = if space_plus_timestamp.is_empty() {
        &mut []
    } else {
        // Exactly the one separator token; the rest went into `timestamp`
        &mut space_plus_timestamp[..1]
    };
    let (just_path, just_filename) = if let Some(last_file_separator_index_from_path_start) =
        last_file_separator_index_from_path_start
    {
        // Separator stays in just_path; filename starts right after it
        path_and_filename.split_at_mut(last_file_separator_index_from_path_start + 1)
    } else {
        // No separator at all: the whole name is the filename
        path_and_filename.split_at_mut(0)
    };

    return SplitRow {
        prefix,
        just_path,
        just_filename,
        time_space,
        timestamp,
    };
}
/// Returns true when both rows look like git diff header names: each one
/// either starts with its git prefix (`a/` for old, `b/` for new) or is an
/// absolute path.
fn have_git_prefixes(old_tokens: &[StyledToken], new_tokens: &[StyledToken]) -> bool {
    let separator = std::path::MAIN_SEPARATOR.to_string();
    let is_separator =
        |token: &StyledToken| token.token == "/" || token.token == separator;

    let old_has_git_prefix =
        old_tokens.len() >= 2 && old_tokens[0].token == "a" && is_separator(&old_tokens[1]);
    // NOTE: guard the [0] index behind first(). The original expression
    // `!is_empty() && a || b` bound `&&` tighter than `||`, so the `|| b`
    // clause indexed token [0] even for an empty slice and would panic.
    let old_is_absolute = old_tokens.first().map_or(false, |token| is_separator(token));

    let new_has_git_prefix =
        new_tokens.len() >= 2 && new_tokens[0].token == "b" && is_separator(&new_tokens[1]);
    let new_is_absolute = new_tokens.first().map_or(false, |token| is_separator(token));

    (old_has_git_prefix || old_is_absolute) && (new_has_git_prefix || new_is_absolute)
}
/// Styles both header rows in place: brightens file names, de-emphasizes git
/// prefixes and timestamps, hyperlinks names that resolve to existing files,
/// lowlights `/dev/null` and lines up the timestamp columns.
pub(crate) fn decorate_paths(old_tokens: &mut [StyledToken], new_tokens: &mut [StyledToken]) {
    // Only treat leading a/ and b/ as git prefixes when both rows qualify
    let look_for_git_prefixes = have_git_prefixes(old_tokens, new_tokens);
    let old_split = split_row(look_for_git_prefixes, old_tokens);
    let new_split = split_row(look_for_git_prefixes, new_tokens);

    // Brighten the file names, but keep any per-char diff highlighting
    old_split.just_filename.iter_mut().for_each(|token| {
        if token.style != Style::DiffPartHighlighted {
            token.style = Style::Bright;
        }
    });
    new_split.just_filename.iter_mut().for_each(|token| {
        if token.style != Style::DiffPartHighlighted {
            token.style = Style::Bright;
        }
    });

    // Hyperlink the old name only when it names the same file as the new one;
    // the new name always gets a hyperlink attempt
    if old_split.just_path == new_split.just_path
        && old_split.just_filename == new_split.just_filename
    {
        hyperlink_tokenized(old_split.just_path, old_split.just_filename);
    }
    hyperlink_tokenized(new_split.just_path, new_split.just_filename);

    // /dev/null is not an interesting file name, tone it down
    lowlight_dev_null(old_split.just_path, old_split.just_filename);
    lowlight_dev_null(new_split.just_path, new_split.just_filename);

    // The name/timestamp separator is plain context
    old_split.time_space.iter_mut().for_each(|token| {
        token.style = Style::Context;
    });
    new_split.time_space.iter_mut().for_each(|token| {
        token.style = Style::Context;
    });

    // Timestamps and git prefixes are less interesting than the names
    old_split.timestamp.iter_mut().for_each(|token| {
        token.style = Style::Lowlighted;
    });
    new_split.timestamp.iter_mut().for_each(|token| {
        token.style = Style::Lowlighted;
    });
    old_split.prefix.iter_mut().for_each(|token| {
        token.style = Style::Lowlighted;
    });
    new_split.prefix.iter_mut().for_each(|token| {
        token.style = Style::Lowlighted;
    });

    // Finally make the timestamps start at the same column on both rows
    align_tabs(old_tokens, new_tokens);
}
#[cfg(test)]
mod tests {
    use crate::ansi::without_ansi_escape_codes;
    use crate::constants::*;

    use super::*;

    const NOT_INVERSE_VIDEO: &str = "\x1b[27m";
    const DEFAULT_COLOR: &str = "\x1b[39m";

    use crate::refiner::tests::FORMATTER;

    /// Feeds a `--- ` line and a `+++ ` line through a FileHighlighter and
    /// returns the rendered (ANSI-coded) header.
    fn highlight_header_lines(old_line: &str, new_line: &str) -> String {
        let mut test_me = FileHighlighter::from_line(old_line, FORMATTER.clone()).unwrap();

        let response = test_me.consume_line(new_line, &ThreadPool::new(1)).unwrap();
        assert_eq!(LineAcceptance::AcceptedWantMore, response.line_accepted);
        assert_eq!(1, response.highlighted.len());

        let highlighted = response
            .highlighted
            .into_iter()
            .next()
            .unwrap()
            .get()
            .to_string();
        return highlighted;
    }

    // Both timestamps should be padded to start in the same column
    #[test]
    fn test_align_timestamps() {
        let highlighted = highlight_header_lines(
            "--- x.txt\t2023-12-15 15:43:29",
            "+++ /Users/johan/xsrc/riff/README.md\t2024-01-29 14:56:40",
        );
        let highlighted_bytes = highlighted.clone().into_bytes();
        let plain = String::from_utf8(without_ansi_escape_codes(&highlighted_bytes)).unwrap();
        assert_eq!(
            "--- x.txt 2023-12-15 15:43:29\n\
            +++ /Users/johan/xsrc/riff/README.md 2024-01-29 14:56:40\n",
            plain.as_str()
        );
    }

    // A trailing timestamp must not prevent the file from being hyperlinked
    #[test]
    fn test_header_with_timestamp_should_hyperlink() {
        let mut test_me =
            FileHighlighter::from_line("--- README.md\t2024-01-01 12:00:00", FORMATTER.clone())
                .unwrap();
        let response = test_me
            .consume_line("+++ README.md\t2024-01-02 12:00:00", &ThreadPool::new(1))
            .unwrap();
        assert_eq!(LineAcceptance::AcceptedWantMore, response.line_accepted);

        let path = test_me
            .url
            .unwrap()
            .to_file_path()
            .expect("Hyperlink should be a file path");
        let canonical = std::fs::canonicalize(&path).expect("Path should canonicalize");
        let expected = std::fs::canonicalize("README.md").expect("README.md should exist");
        assert_eq!(canonical, expected, "Hyperlink should point to README.md");
    }

    // Git prefixes faint, directories plain, file name bold
    #[test]
    fn test_brighten_filename() {
        let highlighted = highlight_header_lines("--- a/x/y/z.txt", "+++ b/x/y/z.txt");
        assert_eq!(
            format!(
                "\
                {BOLD}--- {NORMAL_INTENSITY}{FAINT}a/{NORMAL}x/y/{BOLD}z.txt{NORMAL}\n\
                {BOLD}+++ {NORMAL_INTENSITY}{FAINT}b/{NORMAL}x/y/{BOLD}z.txt{NORMAL}\n"
            ),
            highlighted
        );
    }

    // With no directory part the whole name is the (bold) file name
    #[test]
    fn test_brighten_filename_without_path() {
        let highlighted = highlight_header_lines("--- z.txt", "+++ z.txt");
        assert_eq!(
            format!(
                "\
                {BOLD}--- z.txt{NORMAL}\n\
                {BOLD}+++ z.txt{NORMAL}\n"
            ),
            highlighted
        );
    }

    // Renames get per-character diff highlighting of the changed part
    #[test]
    fn test_brighten_file_rename() {
        let highlighted = highlight_header_lines("--- x.txt", "+++ y.txt");
        assert_eq!(
            format!(
                "\
                {BOLD}--- {INVERSE_VIDEO}{NORMAL_INTENSITY}{OLD}x{NOT_INVERSE_VIDEO}{BOLD}{DEFAULT_COLOR}.txt{NORMAL}\n\
                {BOLD}+++ {INVERSE_VIDEO}{NORMAL_INTENSITY}{GREEN}y{NOT_INVERSE_VIDEO}{BOLD}{DEFAULT_COLOR}.txt{NORMAL}\n"
            ),
            highlighted
        );
    }

    // Created file: /dev/null lowlighted, new name prefixed with "NEW "
    #[test]
    fn test_new_file_header() {
        let highlighted = highlight_header_lines("--- /dev/null", "+++ b/newfile.txt");
        assert_eq!(
            format!(
                "\
                {BOLD}--- {NORMAL_INTENSITY}{FAINT}/dev/null{NORMAL}\n\
                {BOLD}+++ NEW {NORMAL_INTENSITY}{FAINT}b/{NORMAL_INTENSITY}{BOLD}newfile.txt{NORMAL}\n"
            ),
            highlighted
        );
    }

    // Deleted file: old name prefixed with "DELETED ", /dev/null lowlighted
    #[test]
    fn test_deleted_file_header() {
        let highlighted = highlight_header_lines("--- a/oldfile.txt", "+++ /dev/null");
        assert_eq!(
            format!(
                "\
                {BOLD}--- DELETED {NORMAL_INTENSITY}{FAINT}a/{NORMAL_INTENSITY}{BOLD}oldfile.txt{NORMAL}\n\
                {BOLD}+++ {NORMAL_INTENSITY}{FAINT}/dev/null{NORMAL}\n"
            ),
            highlighted
        );
    }

    // Relative paths should be resolved against cwd before hyperlinking
    #[test]
    fn test_hyperlink_filename_relative_path() {
        let mut row = vec![StyledToken::new("README.md".to_string(), Style::Context)];
        hyperlink_tokenized(&mut [], &mut row);

        let url = row[0].url.as_ref().expect("Token should have a URL");
        let url_path = url.to_file_path().expect("URL should be a file path");
        let url_canon = std::fs::canonicalize(&url_path).expect("URL file should exist");
        let readme_canon = std::fs::canonicalize("README.md").expect("README.md should exist");
        assert_eq!(url_canon, readme_canon, "Canonicalized paths should match");
    }

    // An existing relative file should round-trip through the file:// URL
    #[test]
    fn test_hyperlink_filename_happy_path() {
        assert!(
            std::path::Path::new("README.md").exists(),
            "README.md should exist in crate root"
        );
        let url_opt = super::hyperlink_filename("README.md");
        assert!(
            url_opt.is_some(),
            "Expected Some(url::Url) for existing relative path"
        );
        let url = url_opt.unwrap();
        assert_eq!(url.scheme(), "file", "Scheme should be file");
        let path_from_url = url.to_file_path().expect("Should convert URL back to path");
        assert!(path_from_url.exists(), "Path from URL should exist");
        let expected = std::fs::canonicalize("README.md").expect("Canonicalize README.md");
        let actual = std::fs::canonicalize(&path_from_url).expect("Canonicalize URL path");
        assert_eq!(actual, expected, "Canonical paths must match");
    }

    // Non-existing files should get no hyperlink at all
    #[test]
    fn test_hyperlink_filename_missing_file() {
        let url_opt = super::hyperlink_filename("does-not-exist.txt");
        assert!(
            url_opt.is_none(),
            "Expected None for non-existing relative path"
        );
    }

    // The git "a/" prefix should be stripped before resolving the file
    #[test]
    fn test_hyperlink_filename_git_prefix() {
        assert!(
            std::path::Path::new("README.md").exists(),
            "README.md should exist in crate root"
        );
        let url_opt = super::hyperlink_filename("a/README.md");
        assert!(
            url_opt.is_some(),
            "Expected Some(url::Url) for git-style prefixed path"
        );
        let url = url_opt.unwrap();
        assert_eq!(url.scheme(), "file", "Scheme should be file");
        let path_from_url = url.to_file_path().expect("Should convert URL back to path");
        assert!(path_from_url.exists(), "Path from URL should exist");
        let expected = std::fs::canonicalize("README.md").expect("Canonicalize README.md");
        let actual = std::fs::canonicalize(&path_from_url).expect("Canonicalize URL path");
        assert_eq!(actual, expected, "Canonical paths must match");
    }

    // path/filename/tab/timestamp split with "/" separators
    #[test]
    fn test_split_row_with_slash_separator() {
        let mut tokens: Vec<StyledToken> = ["doc", "/", "c.txt", "\t", "2023-12-15 15:43:29"]
            .iter()
            .map(|s| StyledToken::new(s.to_string(), Style::Context))
            .collect();
        let split = split_row(false, &mut tokens);
        assert_eq!(split.prefix.len(), 0);
        assert_eq!(
            split.just_path.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["doc", "/"]
        );
        assert_eq!(
            split
                .just_filename
                .iter()
                .map(|t| &t.token)
                .collect::<Vec<_>>(),
            ["c.txt"]
        );
        assert_eq!(
            split.time_space,
            [StyledToken::new("\t".to_string(), Style::Context)],
        );
        assert_eq!(
            split.timestamp.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["2023-12-15 15:43:29"]
        );
    }

    // Same split, but with the platform separator and a git "a/" prefix
    #[test]
    fn test_split_row_with_os_separator() {
        let sep = std::path::MAIN_SEPARATOR.to_string();
        let mut tokens: Vec<StyledToken> = [
            "a",
            &sep,
            "doc",
            &sep,
            "c.txt",
            " ",
            "2023-12-15 15:43:29",
        ]
        .iter()
        .map(|s| StyledToken::new(s.to_string(), Style::Context))
        .collect();
        let split = split_row(true, &mut tokens);
        assert_eq!(
            split.prefix.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["a", &sep]
        );
        assert_eq!(
            split.just_path.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["doc", &sep]
        );
        assert_eq!(
            split
                .just_filename
                .iter()
                .map(|t| &t.token)
                .collect::<Vec<_>>(),
            ["c.txt"]
        );
        assert_eq!(
            split.time_space,
            [StyledToken::new(" ".to_string(), Style::Context)],
        );
        assert_eq!(
            split.timestamp.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["2023-12-15 15:43:29"]
        );
    }

    // No timestamp: time_space and timestamp come back empty
    #[test]
    fn test_split_row_timeless() {
        let mut tokens: Vec<StyledToken> = ["doc", "/", "c.txt"]
            .iter()
            .map(|s| StyledToken::new(s.to_string(), Style::Context))
            .collect();
        let split = split_row(false, &mut tokens);
        assert_eq!(split.prefix.len(), 0);
        assert_eq!(
            split.just_path.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["doc", "/"]
        );
        assert_eq!(
            split
                .just_filename
                .iter()
                .map(|t| &t.token)
                .collect::<Vec<_>>(),
            ["c.txt"]
        );
        assert_eq!(split.time_space, []);
        assert_eq!(split.timestamp, []);
    }

    // No directory part: everything before the tab is the file name
    #[test]
    fn test_split_row_pathless() {
        let mut tokens: Vec<StyledToken> = ["README.md", "\t", "2023-12-15 15:43:29"]
            .iter()
            .map(|s| StyledToken::new(s.to_string(), Style::Context))
            .collect();
        let split = split_row(false, &mut tokens);
        assert_eq!(split.prefix.len(), 0);
        assert_eq!(split.just_path.len(), 0);
        assert_eq!(
            split
                .just_filename
                .iter()
                .map(|t| &t.token)
                .collect::<Vec<_>>(),
            ["README.md"]
        );
        assert_eq!(
            split.time_space,
            [StyledToken::new("\t".to_string(), Style::Context)],
        );
        assert_eq!(
            split.timestamp.iter().map(|t| &t.token).collect::<Vec<_>>(),
            ["2023-12-15 15:43:29"]
        );
    }

    // Trailing tab with nothing after it: separator captured, timestamp empty
    #[test]
    fn test_split_row_separator_without_time() {
        let mut tokens: Vec<StyledToken> = ["README.md", "\t"]
            .iter()
            .map(|s| StyledToken::new(s.to_string(), Style::Context))
            .collect();
        let split = split_row(false, &mut tokens);
        assert_eq!(split.prefix.len(), 0);
        assert_eq!(split.just_path.len(), 0);
        assert_eq!(
            split
                .just_filename
                .iter()
                .map(|t| &t.token)
                .collect::<Vec<_>>(),
            ["README.md"]
        );
        assert_eq!(
            split.time_space,
            [StyledToken::new("\t".to_string(), Style::Context)],
        );
        assert_eq!(split.timestamp.len(), 0);
    }
}