use std::path::Path;
use hjkl_buffer::Span as BufferSpan;
use ratatui::style::Style as RatStyle;
/// Hard cap on file size eligible for preview (1 MB); `load_preview` rejects
/// larger files without reading their contents.
pub const PREVIEW_MAX_BYTES: u64 = 1_000_000;
/// Syntax-highlight spans for a previewed buffer, grouped per text row.
///
/// `by_row[r]` holds the spans for row `r` (line `r` of the text, rows split
/// on `\n`), each expressed in byte offsets local to that row and carrying an
/// index into `styles` — the deduplicated style palette those indices refer to.
#[derive(Default)]
pub struct PreviewSpans {
    // One Vec of spans per row; outer length equals the number of rows.
    pub by_row: Vec<Vec<BufferSpan>>,
    // Deduplicated palette; spans reference entries here by index (as u32).
    pub styles: Vec<RatStyle>,
}
impl PreviewSpans {
    /// Build per-row spans from absolute byte ranges paired with styles.
    ///
    /// `bytes` is the text the ranges index into; rows are delimited by `\n`.
    /// Styles are interned into the returned palette in first-seen order, and
    /// each range is clipped to every row it crosses. The newline byte itself
    /// is never covered by an emitted span, and empty clips are dropped.
    pub fn from_byte_ranges(ranges: &[(std::ops::Range<usize>, RatStyle)], bytes: &[u8]) -> Self {
        // Absolute byte offset at which each row begins (row 0 starts at 0).
        let mut row_starts = vec![0usize];
        row_starts.extend(
            bytes
                .iter()
                .enumerate()
                .filter_map(|(i, &b)| (b == b'\n').then_some(i + 1)),
        );
        let mut out = PreviewSpans {
            by_row: vec![Vec::new(); row_starts.len()],
            styles: Vec::new(),
        };
        for (range, style) in ranges {
            // Intern the style, reusing an existing palette slot when equal.
            let style_id = match out.styles.iter().position(|s| s == style) {
                Some(idx) => idx as u32,
                None => {
                    out.styles.push(*style);
                    (out.styles.len() - 1) as u32
                }
            };
            // Row containing range.start: last row whose start is <= it.
            let first = row_starts
                .partition_point(|&rs| rs <= range.start)
                .saturating_sub(1);
            for row in first..row_starts.len() {
                let begin = row_starts[row];
                if begin >= range.end {
                    break;
                }
                // Row content ends just before its trailing newline, or at EOF
                // for the final row.
                let end = match row_starts.get(row + 1) {
                    Some(&next) => next.saturating_sub(1),
                    None => bytes.len(),
                };
                let lo = range.start.saturating_sub(begin);
                let hi = range.end.min(end) - begin;
                if hi > lo {
                    out.by_row[row].push(BufferSpan::new(lo, hi, style_id));
                }
            }
        }
        out
    }
}
pub fn build_preview_spans<F>(
flat: &[(std::ops::Range<usize>, &str)],
bytes: &[u8],
resolve_style: F,
) -> PreviewSpans
where
F: Fn(&str) -> Option<RatStyle>,
{
let mut row_starts: Vec<usize> = vec![0];
for (i, &b) in bytes.iter().enumerate() {
if b == b'\n' {
row_starts.push(i + 1);
}
}
let row_count = row_starts.len();
let mut styles: Vec<RatStyle> = Vec::new();
let mut by_row: Vec<Vec<BufferSpan>> = vec![Vec::new(); row_count];
for (byte_range, capture) in flat {
let Some(rat) = resolve_style(capture) else {
continue;
};
let style_id = match styles.iter().position(|s| *s == rat) {
Some(i) => i,
None => {
styles.push(rat);
styles.len() - 1
}
} as u32;
let span_start = byte_range.start;
let span_end = byte_range.end;
let start_row = row_starts
.partition_point(|&rs| rs <= span_start)
.saturating_sub(1);
let mut row = start_row;
while row < row_count {
let row_byte_start = row_starts[row];
let row_byte_end = row_starts
.get(row + 1)
.map(|&s| s.saturating_sub(1))
.unwrap_or(bytes.len());
if row_byte_start >= span_end {
break;
}
let local_start = span_start.saturating_sub(row_byte_start);
let local_end = span_end.min(row_byte_end) - row_byte_start;
if local_end > local_start {
by_row[row].push(BufferSpan::new(local_start, local_end, style_id));
}
row += 1;
}
}
PreviewSpans { by_row, styles }
}
/// Read the file at `abs` for preview display.
///
/// Returns `(text, error)`: on success `text` is the full UTF-8 contents and
/// `error` is empty; on failure `text` is empty and `error` is a short
/// human-readable reason — the I/O error's display text, a size message for
/// files over [`PREVIEW_MAX_BYTES`], `"binary"`, or `"non-utf8"`.
pub fn load_preview(abs: &Path) -> (String, String) {
    // All failure paths share the same shape: empty text plus a reason.
    let reject = |msg: String| (String::new(), msg);
    let meta = match std::fs::metadata(abs) {
        Ok(m) => m,
        Err(e) => return reject(e.to_string()),
    };
    // Refuse to slurp oversized files; report the size in MiB.
    if meta.len() > PREVIEW_MAX_BYTES {
        return reject(format!("{:.1}MB — too large", meta.len() as f64 / 1_048_576.0));
    }
    let bytes = match std::fs::read(abs) {
        Ok(b) => b,
        Err(e) => return reject(e.to_string()),
    };
    // A NUL byte anywhere in the leading 8 KiB marks the file as binary.
    let probe = &bytes[..bytes.len().min(8192)];
    if probe.iter().any(|&b| b == 0) {
        return reject("binary".into());
    }
    // Validate UTF-8, taking ownership of the buffer on success.
    match String::from_utf8(bytes) {
        Ok(text) => (text, String::new()),
        Err(_) => reject("non-utf8".into()),
    }
}