use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use crate::cbr;
use crate::epub;
use crate::DEFAULT_AUTHOR;
use image::imageops::FilterType;
use image::{DynamicImage, GenericImageView, GrayImage, Luma, Rgb, RgbImage};
use rayon::prelude::*;
use crate::mobi;
use crate::moire;
/// Display characteristics of a supported target device.
#[derive(Debug, Clone, Copy)]
pub struct DeviceProfile {
    /// Screen width in pixels.
    pub width: u32,
    /// Screen height in pixels.
    pub height: u32,
    /// True for e-ink devices that render grayscale only.
    pub grayscale: bool,
    /// Lowercase identifier accepted on the command line.
    pub name: &'static str,
}

/// Built-in device table; looked up case-insensitively by [`get_profile`].
/// Order is preserved in [`valid_device_names`] output.
const PROFILES: &[DeviceProfile] = &[
    DeviceProfile { width: 1072, height: 1448, grayscale: true, name: "paperwhite" },
    DeviceProfile { width: 1264, height: 1680, grayscale: true, name: "oasis" },
    DeviceProfile { width: 1860, height: 2480, grayscale: true, name: "scribe" },
    DeviceProfile { width: 1072, height: 1448, grayscale: true, name: "basic" },
    DeviceProfile { width: 1264, height: 1680, grayscale: false, name: "colorsoft" },
    DeviceProfile { width: 1200, height: 1920, grayscale: false, name: "fire-hd-10" },
    DeviceProfile { width: 1236, height: 1648, grayscale: true, name: "kpw5" },
    DeviceProfile { width: 1986, height: 2648, grayscale: true, name: "scribe2025" },
    DeviceProfile { width: 1240, height: 1860, grayscale: true, name: "kindle2024" },
];

/// Looks up a device profile by name, case-insensitively.
pub fn get_profile(name: &str) -> Option<DeviceProfile> {
    let wanted = name.to_lowercase();
    PROFILES.iter().find(|profile| profile.name == wanted).copied()
}

/// Comma-separated list of every supported device name, for error messages.
pub fn valid_device_names() -> String {
    let names: Vec<&str> = PROFILES.iter().map(|profile| profile.name).collect();
    names.join(", ")
}
/// Where the book's cover image comes from.
#[derive(Debug, Clone)]
pub enum CoverSource {
    /// 1-based page number within the comic.
    PageNumber(usize),
    /// Path to an external image file supplied by the user.
    FilePath(PathBuf),
}

/// Tunable knobs for the comic conversion pipeline.
#[derive(Debug, Clone)]
pub struct ComicOptions {
    /// Right-to-left (manga) page order.
    pub rtl: bool,
    /// Split landscape double-page spreads into two pages.
    pub split: bool,
    /// Crop level: 0 = none, 1 = border trim, 2+ = borders + page numbers.
    pub crop: u8,
    /// Apply auto-contrast enhancement (grayscale profiles only).
    pub enhance: bool,
    /// Force webtoon (vertical strip) handling.
    pub webtoon: bool,
    /// Detect panels for Kindle Panel View.
    pub panel_view: bool,
    /// JPEG encoder quality (0-100).
    pub jpeg_quality: u8,
    /// Maximum merged webtoon strip height before chunking.
    pub max_height: u32,
    /// Embed the staged EPUB into the MOBI as source data.
    pub embed_source: bool,
    // NOTE(review): the following are forwarded to the MOBI builder /
    // metadata layer; confirm exact semantics there.
    pub doc_type: Option<String>,
    pub title_override: Option<String>,
    pub author_override: Option<String>,
    pub language: Option<String>,
    /// Cover page selection; `None` means page 1.
    pub cover: Option<CoverSource>,
    /// Rotate spreads 90 degrees instead of splitting them.
    pub rotate_spreads: bool,
    /// Explicit panel ordering ("horizontal-lr", "horizontal-rl", ...).
    pub panel_reading_order: Option<String>,
    /// Center-crop the cover to fill the device screen exactly.
    pub cover_fill: bool,
    pub kindle_limits: bool,
    pub kf8_only: bool,
    pub self_check: bool,
    pub kindlegen_parity: bool,
}

impl Default for ComicOptions {
    /// Defaults chosen for a typical grayscale e-ink conversion.
    fn default() -> Self {
        Self {
            rtl: false,
            split: true,
            crop: 2,
            enhance: true,
            webtoon: false,
            panel_view: true,
            jpeg_quality: 85,
            max_height: 65536,
            embed_source: false,
            doc_type: None,
            title_override: None,
            author_override: None,
            language: None,
            cover: None,
            rotate_spreads: false,
            panel_reading_order: None,
            cover_fill: false,
            kindle_limits: false,
            kf8_only: false,
            self_check: true,
            kindlegen_parity: false,
        }
    }
}
/// Metadata gathered from a ComicInfo.xml sidecar file.
#[derive(Debug, Clone, Default)]
pub struct ComicMetadata {
    pub title: Option<String>,
    pub series: Option<String>,
    /// Issue number within the series.
    pub number: Option<String>,
    pub writers: Vec<String>,
    pub pencillers: Vec<String>,
    pub inkers: Vec<String>,
    pub summary: Option<String>,
    /// True when the source declares right-to-left (manga) reading.
    pub manga_rtl: bool,
    pub language: Option<String>,
    pub year: Option<String>,
    pub month: Option<String>,
}

impl ComicMetadata {
    /// Builds a display title from series/number/title, e.g.
    /// "Series #3 - Title", "Series #3", "Series - Title", or just the title.
    pub fn effective_title(&self) -> Option<String> {
        let series = self.series.as_deref();
        let number = self.number.as_deref();
        let title = self.title.as_deref();
        match (series, number, title) {
            (Some(s), Some(n), Some(t)) => Some(format!("{} #{} - {}", s, n, t)),
            (Some(s), Some(n), None) => Some(format!("{} #{}", s, n)),
            (Some(s), None, Some(t)) => Some(format!("{} - {}", s, t)),
            (None, _, Some(t)) => Some(t.to_string()),
            _ => self.series.clone(),
        }
    }

    /// All credited creators: writers, then pencillers, then inkers.
    pub fn creators(&self) -> Vec<String> {
        self.writers
            .iter()
            .chain(&self.pencillers)
            .chain(&self.inkers)
            .cloned()
            .collect()
    }
}
/// One finished output page held in memory before the EPUB is staged.
struct ProcessedImage {
// Zero-based page position in the final book (reassigned after RTL reversal).
index: usize,
// Fully encoded JPEG bytes for this page.
jpeg_data: Vec<u8>,
// Decoded pixel dimensions of `jpeg_data`; fall back to the device profile
// size when the JPEG cannot be re-read.
width: u32,
height: u32,
// Panel rectangles for Kindle Panel View, when detection found any.
panels: Option<Vec<PanelRect>>,
}
/// Converts a comic at `input` to a MOBI at `output` using default
/// [`ComicOptions`]. Convenience wrapper over [`build_comic_with_options`].
pub fn build_comic(
    input: &Path,
    output: &Path,
    profile: &DeviceProfile,
) -> Result<(), Box<dyn std::error::Error>> {
    let options = ComicOptions::default();
    build_comic_with_options(input, output, profile, &options)
}
/// Converts a comic `input` (directory or archive) into a fixed-layout MOBI
/// at `output`, sized for `profile` and tuned by `options`.
///
/// Pipeline: collect images -> optional webtoon merge/re-split -> parallel
/// per-page processing -> optional panel detection -> fixed-layout EPUB ->
/// OPF validation -> MOBI build -> post-build metadata self-check. Temp
/// directories are removed on both the success and validation-failure paths.
///
/// # Errors
/// Fails when no usable images are found, when the generated OPF has
/// validation errors, or when the MOBI build or final check fails.
pub fn build_comic_with_options(
input: &Path,
output: &Path,
profile: &DeviceProfile,
options: &ComicOptions,
) -> Result<(), Box<dyn std::error::Error>> {
// `cbz_temp_dir` is Some only when the input was an archive extracted to disk.
let (source_images, cbz_temp_dir) = collect_images(input)?;
if source_images.is_empty() {
return Err("No images found in input".into());
}
eprintln!("Found {} images", source_images.len());
// Webtoon handling: forced via options or auto-detected from tall aspect
// ratios; replaces the source list with re-paginated strip pages.
let is_webtoon = options.webtoon || detect_webtoon(&source_images);
let source_images = if is_webtoon {
if options.webtoon {
eprintln!("Webtoon mode enabled");
} else {
eprintln!("Detected webtoon format");
}
let pages = webtoon_preprocess(&source_images, profile, options)?;
eprintln!("Split webtoon into {} pages", pages.len());
pages
} else {
source_images
};
// ComicInfo.xml metadata (if present) can also flip reading direction.
let metadata = find_and_parse_comic_info(input, cbz_temp_dir.as_deref());
let rtl = options.rtl || metadata.as_ref().map_or(false, |m| m.manga_rtl);
if rtl {
eprintln!("RTL (manga) mode enabled");
}
eprintln!("Processing images for {} ({}x{}, {})...",
profile.name, profile.width, profile.height,
if profile.grayscale { "grayscale" } else { "color" });
// Zero-based index of the source page acting as cover; None when the cover
// comes from an external file.
let cover_source_idx = match &options.cover {
Some(CoverSource::PageNumber(n)) => Some(n.saturating_sub(1)),
Some(CoverSource::FilePath(_)) => None, None => Some(0),
};
let total = source_images.len();
// Process pages in parallel; failures are logged and skipped rather than
// aborting the whole book.
let processed_groups: Vec<Option<(usize, Vec<Vec<u8>>)>> = source_images
.par_iter()
.enumerate()
.map(|(idx, img_path)| {
if idx % 10 == 0 || idx == total - 1 {
eprintln!("Processing image {}/{}...", idx + 1, total);
}
let is_cover = options.cover_fill && cover_source_idx == Some(idx);
match process_image_pipeline(img_path, profile, options, is_cover) {
Ok(jpeg_pages) => Some((idx, jpeg_pages)),
Err(e) => {
eprintln!("Warning: skipping {} ({})", img_path.display(), e);
None
}
}
})
.collect();
let processed_groups: Vec<(usize, Vec<Vec<u8>>)> = processed_groups.into_iter().flatten().collect();
if processed_groups.is_empty() {
return Err("All images failed to load - no valid images to process".into());
}
// Keep surviving pages in original source order.
let mut processed_groups = processed_groups;
processed_groups.sort_by_key(|(idx, _)| *idx);
// Flatten split spreads into a linear page list, re-reading each JPEG for
// its true dimensions (device size is the fallback).
let mut processed: Vec<ProcessedImage> = Vec::new();
let mut page_idx = 0;
for (_orig_idx, pages) in &processed_groups {
for jpeg_data in pages {
let (w, h) = image::load_from_memory(jpeg_data)
.map(|img| (img.width(), img.height()))
.unwrap_or((profile.width, profile.height));
processed.push(ProcessedImage {
index: page_idx,
jpeg_data: jpeg_data.clone(),
width: w,
height: h,
panels: None, });
page_idx += 1;
}
}
// Manga: reverse page order and renumber so the spine reads right-to-left.
if rtl {
processed.reverse();
for (i, page) in processed.iter_mut().enumerate() {
page.index = i;
}
}
let total_image_bytes: usize = processed.iter().map(|p| p.jpeg_data.len()).sum();
eprintln!("Processed into {} pages ({:.1} MB total JPEG data)",
processed.len(),
total_image_bytes as f64 / (1024.0 * 1024.0));
// Optional Kindle Panel View: detect panel rectangles per page, then order
// them according to the requested reading direction.
if options.panel_view {
eprintln!("Detecting panels for Panel View...");
let panel_results = detect_panels_for_pages(&processed);
let mut panel_count = 0;
for (i, panels) in panel_results.into_iter().enumerate() {
if let Some(ref p) = panels {
panel_count += 1;
eprintln!(" Page {}: {} panels", i + 1, p.len());
}
processed[i].panels = panels;
}
eprintln!("Panel View: detected panels on {}/{} pages", panel_count, processed.len());
let reading_order = resolve_panel_reading_order(
options.panel_reading_order.as_deref(), options.rtl,
);
if reading_order != "horizontal-lr" {
eprintln!("Panel View: sorting panels in {} order", reading_order);
}
for page in processed.iter_mut() {
if let Some(ref mut panels) = page.panels {
sort_panels_by_reading_order(panels, reading_order);
}
}
}
// Stage a fixed-layout EPUB on disk and validate its OPF before building.
let temp_dir = create_temp_dir(output)?;
let opf_path = write_fixed_layout_epub_v2(
&temp_dir, &processed, profile, rtl, metadata.as_ref(), options.panel_view, options,
)?;
match crate::validate::validate_opf(&opf_path) {
Ok(report) => {
for finding in &report.findings {
if !matches!(finding.level, crate::validate::Level::Info) {
eprintln!(" {}", finding);
}
}
let errors = report.error_count();
if errors > 0 {
// Validation failed: clean up both temp dirs before bailing out.
if temp_dir.exists() {
let _ = fs::remove_dir_all(&temp_dir);
}
if let Some(ref cbz_dir) = cbz_temp_dir {
if cbz_dir.exists() {
let _ = fs::remove_dir_all(cbz_dir);
}
}
return Err(format!(
"Comic OPF validation failed with {} errors",
errors
)
.into());
}
}
// A validator crash is non-fatal; continue with the build.
Err(e) => {
eprintln!("Warning: could not validate comic OPF: {}", e);
}
}
// Capture title/author now: the temp OPF is deleted before the final check.
let opf_snapshot: Option<(String, String)> = crate::opf::OPFData::parse(&opf_path)
.ok()
.map(|o| (o.title.clone(), o.author.clone()));
eprintln!("Building MOBI...");
// Optionally embed the staged EPUB as the MOBI's source attachment.
let srcs_data = if options.embed_source {
epub::create_epub_from_dir(&temp_dir).ok()
} else {
None
};
// NOTE(review): the positional bool flags mirror mobi::build_mobi's
// signature - confirm their meaning against that function before editing.
let result = mobi::build_mobi(
&opf_path,
output,
false, false, srcs_data.as_deref(),
false, true, false, options.kf8_only,
options.doc_type.as_deref(),
options.kindle_limits,
options.self_check,
options.kindlegen_parity,
false, );
// Clean up temp dirs before propagating any build error.
if temp_dir.exists() {
if let Err(e) = fs::remove_dir_all(&temp_dir) {
eprintln!("Warning: failed to clean up temp dir {}: {}", temp_dir.display(), e);
}
}
if let Some(cbz_dir) = cbz_temp_dir {
if cbz_dir.exists() {
if let Err(e) = fs::remove_dir_all(&cbz_dir) {
eprintln!("Warning: failed to clean up archive extraction dir {}: {}", cbz_dir.display(), e);
}
}
}
result?;
// Final self-check: verify the written MOBI carries the expected metadata.
let (title, author) = match opf_snapshot.as_ref() {
Some((t, a)) => (t.as_str(), a.as_str()),
None => ("", ""),
};
let expected = crate::mobi_check::ExpectedMetadata {
title: if title.is_empty() { None } else { Some(title) },
author: if author.is_empty() { None } else { Some(author) },
is_comic: true,
is_dictionary: false,
};
let report = crate::mobi_check::check_mobi_file(output, &expected)?;
crate::mobi_check::report_result(output, &report)?;
Ok(())
}
/// Resolves `input` (directory, cbz/zip, epub, or cbr/rar) into an ordered
/// list of page image paths.
///
/// The second tuple element is a temp extraction directory the caller must
/// delete, or `None` when reading a directory in place.
fn collect_images(input: &Path) -> Result<(Vec<PathBuf>, Option<PathBuf>), Box<dyn std::error::Error>> {
    if input.is_dir() {
        return Ok((collect_images_from_dir(input)?, None));
    }
    let Some(ext) = input.extension() else {
        return Err("Cannot determine input type (not a directory and has no extension)".into());
    };
    match ext.to_string_lossy().to_lowercase().as_str() {
        "cbz" | "zip" => extract_cbz(input).map(|(images, dir)| (images, Some(dir))),
        "epub" => extract_epub_images(input).map(|(images, dir)| (images, Some(dir))),
        "cbr" | "rar" => cbr::extract_cbr(input).map(|(images, dir)| (images, Some(dir))),
        "pdf" => Err("PDF support coming soon".into()),
        other => Err(format!("Unsupported input format: .{}", other).into()),
    }
}
/// Extracts an EPUB and returns its page images in spine (reading) order,
/// plus the temp extraction directory the caller must delete.
///
/// Each spine XHTML document is scanned for image references, which are
/// resolved relative to the document and deduplicated (first occurrence
/// wins). Falls back to a recursive directory scan when the spine yields no
/// images.
fn extract_epub_images(epub_path: &Path) -> Result<(Vec<PathBuf>, PathBuf), Box<dyn std::error::Error>> {
use crate::opf::OPFData;
let (temp_dir, opf_path) = epub::extract_epub(epub_path)?;
let opf = OPFData::parse(&opf_path)?;
let opf_dir = opf_path.parent().unwrap_or(Path::new("."));
let mut ordered_images: Vec<PathBuf> = Vec::new();
// Dedup set: an image referenced from several pages is kept only once.
let mut seen = std::collections::HashSet::new();
for (_id, href) in &opf.spine_items {
let xhtml_path = opf_dir.join(href);
if !xhtml_path.exists() {
continue;
}
let xhtml_dir = xhtml_path.parent().unwrap_or(opf_dir);
// Unreadable spine documents are skipped, not fatal.
let content = match fs::read_to_string(&xhtml_path) {
Ok(c) => c,
Err(_) => continue,
};
let image_refs = extract_image_refs_from_xhtml(&content);
for img_ref in image_refs {
let img_path = xhtml_dir.join(&img_ref);
// Canonicalize so "../images/p1.jpg" from two pages dedups to one path.
let img_path = img_path.canonicalize().unwrap_or(img_path);
if img_path.exists() && is_image_file(&img_path) && seen.insert(img_path.clone()) {
ordered_images.push(img_path);
}
}
}
if ordered_images.is_empty() {
eprintln!("No images found via EPUB spine, falling back to directory scan");
// NOTE(review): if this fallback errors, `?` propagates without removing
// temp_dir - consider cleaning up on that path too.
ordered_images = collect_images_from_dir(&temp_dir)?;
}
if ordered_images.is_empty() {
let _ = fs::remove_dir_all(&temp_dir);
return Err("No image files found in EPUB archive".into());
}
eprintln!("EPUB: found {} images in spine order", ordered_images.len());
Ok((ordered_images, temp_dir))
}
/// Collects image references from an XHTML/XML page, in document order.
///
/// Matches HTML `<img src="...">` and SVG `<image href="...">` (any
/// namespace prefix, including `xlink:href`). Falls back to regex extraction
/// when XML parsing fails or when strict parsing finds nothing.
pub fn extract_image_refs_from_xhtml(content: &str) -> Vec<String> {
use quick_xml::events::Event;
use quick_xml::Reader;
let mut refs = Vec::new();
let mut reader = Reader::from_str(content);
reader.config_mut().trim_text(true);
let mut buf = Vec::new();
loop {
match reader.read_event_into(&mut buf) {
// Both open tags and self-closing tags can carry image attributes.
Ok(Event::Start(ref e)) | Ok(Event::Empty(ref e)) => {
let tag_name = e.name();
let tag = std::str::from_utf8(tag_name.as_ref()).unwrap_or("");
// Strip any namespace prefix ("svg:image" -> "image").
let local = tag.rsplit(':').next().unwrap_or(tag);
match local {
"img" => {
for attr in e.attributes().flatten() {
if attr.key.as_ref() == b"src" {
let src = String::from_utf8_lossy(&attr.value).to_string();
if !src.is_empty() {
refs.push(src);
}
}
}
}
"image" => {
for attr in e.attributes().flatten() {
let key = std::str::from_utf8(attr.key.as_ref()).unwrap_or("");
// Accept bare `href` and any prefixed form such as `xlink:href`.
if key == "href" || key.ends_with(":href") {
let href = String::from_utf8_lossy(&attr.value).to_string();
if !href.is_empty() {
refs.push(href);
}
}
}
}
_ => {}
}
}
Ok(Event::Eof) => break,
// Malformed XML: abandon strict parsing and use the regex scanner.
Err(_) => {
return extract_image_refs_regex(content);
}
_ => {}
}
buf.clear();
}
// Parse succeeded but found nothing; the looser regex pass may still match.
if refs.is_empty() {
return extract_image_refs_regex(content);
}
refs
}
/// Regex fallback for [`extract_image_refs_from_xhtml`]: scrapes
/// double-quoted `<img src>` then `<image href>` values from raw markup.
/// Both patterns are compiled once and cached for the process lifetime.
pub fn extract_image_refs_regex(content: &str) -> Vec<String> {
    use regex::Regex;
    use std::sync::OnceLock;
    static IMG_SRC_RE: OnceLock<Regex> = OnceLock::new();
    static IMAGE_HREF_RE: OnceLock<Regex> = OnceLock::new();
    let img_re =
        IMG_SRC_RE.get_or_init(|| Regex::new(r#"<img\s[^>]*src="([^"]+)""#).unwrap());
    let image_re = IMAGE_HREF_RE
        .get_or_init(|| Regex::new(r#"<image\s[^>]*(?:xlink:)?href="([^"]+)""#).unwrap());
    // All <img> matches first, then all <image> matches, preserving order
    // within each group.
    img_re
        .captures_iter(content)
        .chain(image_re.captures_iter(content))
        .filter_map(|cap| cap.get(1))
        .map(|m| m.as_str().to_string())
        .collect()
}
/// Recursively scans `dir` and returns every image file, natural-sorted so
/// numeric runs in file names compare numerically ("page2" < "page10").
fn collect_images_from_dir(dir: &Path) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
    let mut found: Vec<PathBuf> = Vec::new();
    collect_images_recursive(dir, &mut found)?;
    if found.is_empty() {
        return Err(format!("No image files found in {}", dir.display()).into());
    }
    // Cached keys: each path's sort key is computed exactly once.
    found.sort_by_cached_key(|path| natural_sort_key(path));
    Ok(found)
}
/// Depth-first walk of `dir`, appending every image file to `images`.
/// Entries are visited in file-name order for deterministic traversal.
fn collect_images_recursive(dir: &Path, images: &mut Vec<PathBuf>) -> Result<(), Box<dyn std::error::Error>> {
    let mut entries: Vec<_> = fs::read_dir(dir)?.filter_map(Result::ok).collect();
    entries.sort_by_key(|entry| entry.file_name());
    for entry in entries {
        let path = entry.path();
        if path.is_dir() {
            collect_images_recursive(&path, images)?;
        } else if is_image_file(&path) {
            images.push(path);
        }
    }
    Ok(())
}
/// True when `path` has a recognized raster-image extension
/// (case-insensitive). Paths without an extension are rejected.
fn is_image_file(path: &Path) -> bool {
    path.extension()
        .and_then(|ext| ext.to_str())
        .map_or(false, |ext| {
            matches!(
                ext.to_lowercase().as_str(),
                "jpg" | "jpeg" | "png" | "gif" | "webp" | "bmp" | "tiff" | "tif"
            )
        })
}
/// One segment of a natural-sort key. Variant order matters: derived `Ord`
/// ranks any `Number` before any `Text`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum NaturalSortPart {
    Number(u64),
    Text(String),
}

/// Converts a run of characters into a key part: digit runs parse as numbers
/// (unparseable runs become 0), text runs are lowercased.
fn run_to_part(run: &str, is_digit_run: bool) -> NaturalSortPart {
    if is_digit_run {
        NaturalSortPart::Number(run.parse::<u64>().unwrap_or(0))
    } else {
        NaturalSortPart::Text(run.to_lowercase())
    }
}

/// Splits a file name into alternating text/number parts so that
/// "page2.jpg" sorts before "page10.jpg".
fn natural_sort_key(path: &Path) -> Vec<NaturalSortPart> {
    let name = path.file_name().unwrap_or_default().to_string_lossy();
    let mut key: Vec<NaturalSortPart> = Vec::new();
    let mut run = String::new();
    let mut run_is_digit = false;
    for ch in name.chars() {
        let is_digit = ch.is_ascii_digit();
        // Close the current run when the character class flips.
        if !run.is_empty() && is_digit != run_is_digit {
            key.push(run_to_part(&run, run_is_digit));
            run.clear();
        }
        run_is_digit = is_digit;
        run.push(ch);
    }
    if !run.is_empty() {
        key.push(run_to_part(&run, run_is_digit));
    }
    key
}
/// Extracts a CBZ/ZIP archive into a sibling `.kindling_cbz_<stem>` directory
/// and returns the naturally sorted image paths plus that directory (the
/// caller is responsible for deleting it).
///
/// ComicInfo.xml (at any depth) is extracted for the metadata pass but not
/// listed as a page. Directory entries, `__MACOSX` metadata, and hidden
/// entries inside subdirectories (paths containing "/.") are skipped.
///
/// NOTE(review): entry names are joined to `extract_dir` unsanitized; a
/// crafted archive with ".." components could write outside the extraction
/// dir (zip-slip) - consider sanitizing entry paths.
fn extract_cbz(cbz_path: &Path) -> Result<(Vec<PathBuf>, PathBuf), Box<dyn std::error::Error>> {
let file = fs::File::open(cbz_path)?;
let mut archive = zip::ZipArchive::new(file)?;
let stem = cbz_path.file_stem().unwrap_or_default().to_string_lossy();
let parent = cbz_path.parent().unwrap_or(Path::new("."));
let extract_dir = parent.join(format!(".kindling_cbz_{}", stem));
// Start from a clean extraction dir so stale files cannot pollute the book.
if extract_dir.exists() {
fs::remove_dir_all(&extract_dir)?;
}
fs::create_dir_all(&extract_dir)?;
let mut image_paths: Vec<PathBuf> = Vec::new();
for i in 0..archive.len() {
let mut entry = archive.by_index(i)?;
let name = entry.name().to_string();
if name.ends_with('/') || name.starts_with("__MACOSX") || name.contains("/.") {
continue;
}
let out_path = extract_dir.join(&name);
let lower_name = name.to_lowercase();
// ComicInfo.xml is written out but intentionally kept off the page list.
if lower_name == "comicinfo.xml" || lower_name.ends_with("/comicinfo.xml") {
if let Some(parent_dir) = out_path.parent() {
fs::create_dir_all(parent_dir)?;
}
let mut buf = Vec::new();
entry.read_to_end(&mut buf)?;
fs::write(&out_path, &buf)?;
continue;
}
if !is_image_file(Path::new(&name)) {
continue;
}
if let Some(parent_dir) = out_path.parent() {
fs::create_dir_all(parent_dir)?;
}
let mut buf = Vec::new();
entry.read_to_end(&mut buf)?;
fs::write(&out_path, &buf)?;
image_paths.push(out_path);
}
// Natural sort so "page2" precedes "page10".
image_paths.sort_by(|a, b| natural_sort_key(a).cmp(&natural_sort_key(b)));
if image_paths.is_empty() {
let _ = fs::remove_dir_all(&extract_dir);
return Err("No image files found in CBZ archive".into());
}
Ok((image_paths, extract_dir))
}
/// Processes one source page into one or more device-ready JPEG buffers
/// (two when a double-page spread is split).
///
/// Order of operations: decode -> moire removal (color devices only, for
/// grayscale-looking scans) -> cropping per `options.crop` (0 = none,
/// 1 = border trim, 2+ = border trim + page-number strips) -> spread
/// handling (rotate 90 degrees or split left/right) -> contrast enhancement
/// (grayscale devices) -> cover fill-crop (first page only, when requested)
/// -> aspect-preserving resize to the device -> grayscale conversion ->
/// JPEG encode at `options.jpeg_quality`.
fn process_image_pipeline(
path: &Path,
profile: &DeviceProfile,
options: &ComicOptions,
cover_fill: bool,
) -> Result<Vec<Vec<u8>>, Box<dyn std::error::Error>> {
let mut img = image::open(path)?;
let (w, h) = img.dimensions();
if w == 0 || h == 0 {
return Err(format!("zero dimensions ({}x{})", w, h).into());
}
// Moire (interference-pattern) removal is applied only when producing
// color output from a source that carries no real color.
if !profile.grayscale && is_grayscale_source(&img) {
moire::remove_moire(&mut img);
}
let img = match options.crop {
0 => img,
1 => crop_borders(&img),
_ => {
let cropped = crop_borders(&img);
crop_page_numbers(&cropped)
}
};
// Landscape pages are treated as spreads: either rotate to keep them whole
// or split into left/right pages.
let pages = if options.rotate_spreads && is_double_page_spread(&img) {
vec![img.rotate90()]
} else if options.split && is_double_page_spread(&img) {
let (left, right) = split_spread(&img);
vec![left, right]
} else {
vec![img]
};
let mut results = Vec::new();
for (page_idx, page) in pages.into_iter().enumerate() {
let page = if options.enhance && profile.grayscale {
enhance_image(&page)
} else {
page
};
// Cover fill applies only to the first page of the (possibly split) set.
let page = if cover_fill && page_idx == 0 {
cover_fill_crop(&page, profile.width, profile.height)
} else {
page
};
// `resize` fits within the device bounds while preserving aspect ratio.
let page = page.resize(profile.width, profile.height, FilterType::Lanczos3);
let page = if profile.grayscale {
DynamicImage::ImageLuma8(page.to_luma8())
} else {
page
};
let jpeg_buf = encode_jpeg(&page, options.jpeg_quality)?;
results.push(jpeg_buf);
}
Ok(results)
}
/// Center-crops `img` to exactly the target aspect ratio so a cover fills
/// the device screen with no letterboxing. Returns a clone unchanged when
/// the ratios already match within 0.001.
fn cover_fill_crop(img: &DynamicImage, target_width: u32, target_height: u32) -> DynamicImage {
    let (src_w, src_h) = img.dimensions();
    let want_ratio = target_width as f64 / target_height as f64;
    let have_ratio = src_w as f64 / src_h as f64;
    if (have_ratio - want_ratio).abs() < 0.001 {
        return img.clone();
    }
    // Trim the dimension that overshoots the target ratio.
    let (keep_w, keep_h) = if have_ratio > want_ratio {
        let narrowed = (src_h as f64 * want_ratio).round() as u32;
        (narrowed.min(src_w), src_h)
    } else {
        let shortened = (src_w as f64 / want_ratio).round() as u32;
        (src_w, shortened.min(src_h))
    };
    let x0 = (src_w - keep_w) / 2;
    let y0 = (src_h - keep_h) / 2;
    image::imageops::crop_imm(img, x0, y0, keep_w, keep_h).to_image().into()
}
/// Encodes `img` as a JPEG at the given quality into an in-memory buffer.
fn encode_jpeg(img: &DynamicImage, quality: u8) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let mut out = Vec::new();
    let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
        std::io::Cursor::new(&mut out),
        quality,
    );
    img.write_with_encoder(encoder)?;
    Ok(out)
}
/// Returns true when the image carries no meaningful color information.
///
/// Native luma images are trivially grayscale. Otherwise a roughly 32x32
/// grid of pixels is sampled; the image counts as grayscale unless some
/// sample's RGB channels spread by more than 10 levels.
fn is_grayscale_source(img: &DynamicImage) -> bool {
    match img {
        DynamicImage::ImageLuma8(_) | DynamicImage::ImageLuma16(_) => true,
        _ => {
            let (w, h) = img.dimensions();
            // Degenerate images have no color to speak of.
            if w == 0 || h == 0 {
                return true;
            }
            let rgb = img.to_rgb8();
            let step_x = (w / 32).max(1) as usize;
            let step_y = (h / 32).max(1) as usize;
            for x in (0..w).step_by(step_x) {
                for y in (0..h).step_by(step_y) {
                    let [r, g, b] = rgb.get_pixel(x, y).0;
                    let spread = r.max(g).max(b) - r.min(g).min(b);
                    if spread > 10 {
                        return false;
                    }
                }
            }
            true
        }
    }
}
/// Landscape pages (strictly wider than tall) are treated as two-page spreads.
pub fn is_double_page_spread(img: &DynamicImage) -> bool {
    let (width, height) = img.dimensions();
    width > height
}
/// Cuts a spread vertically down the middle into (left, right) halves; the
/// right half absorbs the extra column when the width is odd.
pub fn split_spread(img: &DynamicImage) -> (DynamicImage, DynamicImage) {
    let (width, height) = img.dimensions();
    let half = width / 2;
    (
        img.crop_imm(0, 0, half, height),
        img.crop_imm(half, 0, width - half, height),
    )
}
/// Trims uniform borders (solid margins) from all four page edges.
///
/// Each edge scans inward, treating rows/columns as border while their mean
/// luminance stays within `threshold` of the outermost row/column. Borders
/// thinner than 2% of the page are ignored as noise, and the original image
/// is returned untouched when nothing (or an invalid region) would remain.
pub fn crop_borders(img: &DynamicImage) -> DynamicImage {
let gray = img.to_luma8();
let (w, h) = gray.dimensions();
// Tiny images: not enough signal to crop safely.
if w < 10 || h < 10 {
return img.clone();
}
let threshold: f64 = 20.0; let min_border_frac: f64 = 0.02;
// Top edge: advance while rows match the very first row's average.
let edge_top = row_average(&gray, 0);
let mut top = 0u32;
for y in 0..h {
if (row_average(&gray, y) - edge_top).abs() > threshold {
break;
}
top = y + 1;
}
if (top as f64) < (h as f64 * min_border_frac) {
top = 0;
}
// Bottom edge: same scan, upward from the last row.
let edge_bottom = row_average(&gray, h - 1);
let mut bottom = h;
for y in (0..h).rev() {
if (row_average(&gray, y) - edge_bottom).abs() > threshold {
break;
}
bottom = y;
}
if ((h - bottom) as f64) < (h as f64 * min_border_frac) {
bottom = h;
}
// Left edge.
let edge_left = col_average(&gray, 0);
let mut left = 0u32;
for x in 0..w {
if (col_average(&gray, x) - edge_left).abs() > threshold {
break;
}
left = x + 1;
}
if (left as f64) < (w as f64 * min_border_frac) {
left = 0;
}
// Right edge.
let edge_right = col_average(&gray, w - 1);
let mut right = w;
for x in (0..w).rev() {
if (col_average(&gray, x) - edge_right).abs() > threshold {
break;
}
right = x;
}
if ((w - right) as f64) < (w as f64 * min_border_frac) {
right = w;
}
// Degenerate crop (edges crossed) or nothing to do: keep the original.
if left >= right || top >= bottom {
return img.clone();
}
if top == 0 && bottom == h && left == 0 && right == w {
return img.clone();
}
img.crop_imm(left, top, right - left, bottom - top)
}
/// Mean luminance of pixel row `y`.
fn row_average(img: &GrayImage, y: u32) -> f64 {
    let width = img.width();
    let total: f64 = (0..width).map(|x| f64::from(img.get_pixel(x, y).0[0])).sum();
    total / f64::from(width)
}
/// Mean luminance of pixel column `x`.
fn col_average(img: &GrayImage, x: u32) -> f64 {
    let height = img.height();
    let total: f64 = (0..height).map(|y| f64::from(img.get_pixel(x, y).0[0])).sum();
    total / f64::from(height)
}
/// Removes thin top/bottom strips that contain only an isolated page number.
///
/// Examines a ~6%-high strip at each edge; a strip is cropped when its "ink"
/// (pixels differing from the sampled background by more than `COLOR_TOL`)
/// is sparse and horizontally narrow - see `is_page_number_strip`. Gives up
/// (returns the original) when nothing matched or the combined crop would
/// exceed 10% of the page height.
pub fn crop_page_numbers(img: &DynamicImage) -> DynamicImage {
let gray = img.to_luma8();
let (w, h) = gray.dimensions();
// Too small to contain a distinguishable page-number strip.
if w < 20 || h < 40 {
return img.clone();
}
const STRIP_FRAC: f64 = 0.06; const MAX_INK_FRAC: f64 = 0.08;
const MIN_INK_FRAC: f64 = 0.0005;
const COLOR_TOL: u8 = 30;
let strip_h = ((h as f64 * STRIP_FRAC).round() as u32).max(3);
let bg = detect_strip_background(&gray, w, h);
// Each call returns the strip height to crop, or 0 to keep the strip.
let crop_bottom = if h > strip_h {
is_page_number_strip(&gray, w, h.saturating_sub(strip_h), h, bg, COLOR_TOL, MIN_INK_FRAC, MAX_INK_FRAC)
} else {
0
};
let crop_top = if h > strip_h {
is_page_number_strip(&gray, w, 0, strip_h, bg, COLOR_TOL, MIN_INK_FRAC, MAX_INK_FRAC)
} else {
0
};
// Safety valve: never remove more than 10% of the page, and skip entirely
// when neither strip qualified.
let max_total = (h as f64 * 0.10) as u32;
if crop_top + crop_bottom > max_total || crop_top + crop_bottom == 0 {
return img.clone();
}
let new_top = crop_top;
let new_bottom = h - crop_bottom;
if new_top >= new_bottom {
return img.clone();
}
img.crop_imm(0, new_top, w, new_bottom - new_top)
}
/// Estimates the page background level by averaging eight probe points: the
/// four corners plus the midpoints of each edge.
///
/// NOTE(review): assumes `w > 0` and `h > 0` (the visible caller guards with
/// w >= 20, h >= 40); zero dimensions would underflow `w - 1`/`h - 1`.
fn detect_strip_background(gray: &GrayImage, w: u32, h: u32) -> u8 {
    let probes = [
        (0, 0),
        (w - 1, 0),
        (0, h - 1),
        (w - 1, h - 1),
        (w / 2, 0),
        (w / 2, h - 1),
        (0, h / 2),
        (w - 1, h / 2),
    ];
    let total: u32 = probes
        .iter()
        .map(|&(x, y)| u32::from(gray.get_pixel(x, y).0[0]))
        .sum();
    (total / probes.len() as u32) as u8
}
/// Decides whether the horizontal strip `[y_start, y_end)` looks like a lone
/// page number, returning the strip height to crop (0 = keep the strip).
///
/// "Ink" is any pixel differing from the background `bg` by more than `tol`.
/// The strip qualifies only when the ink fraction lies within
/// `[min_ink, max_ink]` and the ink's horizontal extent is narrow (<= 35% of
/// the page width) - i.e. a small isolated mark, not artwork.
fn is_page_number_strip(
gray: &GrayImage,
w: u32,
y_start: u32,
y_end: u32,
bg: u8,
tol: u8,
min_ink: f64,
max_ink: f64,
) -> u32 {
let strip_h = y_end - y_start;
let total_pixels = (w as u64) * (strip_h as u64);
if total_pixels == 0 {
return 0;
}
// Count ink pixels and track their bounding box.
// NOTE(review): the vertical bounds (ink_y_min/max) are tracked but never
// used below - candidates for removal or a future aspect check.
let mut ink_count: u64 = 0;
let mut ink_x_min: u32 = w;
let mut ink_x_max: u32 = 0;
let mut ink_y_min: u32 = y_end;
let mut ink_y_max: u32 = y_start;
for y in y_start..y_end {
for x in 0..w {
let v = gray.get_pixel(x, y).0[0];
let diff = if v > bg { v - bg } else { bg - v };
if diff > tol {
ink_count += 1;
if x < ink_x_min { ink_x_min = x; }
if x > ink_x_max { ink_x_max = x; }
if y < ink_y_min { ink_y_min = y; }
if y > ink_y_max { ink_y_max = y; }
}
}
}
let ink_frac = ink_count as f64 / total_pixels as f64;
// Too little ink: effectively blank, nothing worth cropping.
if ink_frac < min_ink {
return 0;
}
// Too much ink: this is artwork, not a page number.
if ink_frac > max_ink {
return 0;
}
if ink_x_min >= ink_x_max {
return 0;
}
// Wide ink span means text/art spanning the page - keep it.
let ink_width_frac = (ink_x_max - ink_x_min) as f64 / w as f64;
if ink_width_frac > 0.35 {
return 0;
}
strip_h
}
/// Auto-contrast with a mild gamma boost, tuned for e-ink output.
///
/// Builds a luminance histogram, clips 0.5% of pixels at each extreme to
/// find the effective black/white points, then applies a lookup table that
/// stretches that range to 0..255 through gamma 0.8 (brightens midtones).
/// Luma images stay luma; other formats get the same LUT applied per RGB
/// channel. Returns the original unchanged when the histogram is degenerate
/// (high <= low).
pub fn enhance_image(img: &DynamicImage) -> DynamicImage {
let gray = img.to_luma8();
let (w, h) = gray.dimensions();
let total_pixels = (w * h) as f64;
let mut histogram = [0u32; 256];
for pixel in gray.pixels() {
histogram[pixel.0[0] as usize] += 1;
}
// Number of pixels to clip at each end (0.5% of the image).
let clip_count = (total_pixels * 0.005) as u32;
// Effective black point: darkest level after clipping the bottom 0.5%.
let mut low = 0u8;
let mut cumulative = 0u32;
for i in 0..256 {
cumulative += histogram[i];
if cumulative >= clip_count {
low = i as u8;
break;
}
}
// Effective white point, scanning from the bright end.
let mut high = 255u8;
cumulative = 0;
for i in (0..256).rev() {
cumulative += histogram[i];
if cumulative >= clip_count {
high = i as u8;
break;
}
}
if high <= low {
return img.clone();
}
// gamma < 1 lifts midtones after the linear stretch.
let gamma: f64 = 0.8;
let range = (high - low) as f64;
let mut lut = [0u8; 256];
for i in 0..256 {
let clamped = (i as u8).max(low).min(high);
let normalized = (clamped - low) as f64 / range; let gamma_corrected = normalized.powf(gamma);
lut[i] = (gamma_corrected * 255.0).round().clamp(0.0, 255.0) as u8;
}
let (w, h) = img.dimensions();
match img {
DynamicImage::ImageLuma8(_) => {
let mut out = GrayImage::new(w, h);
for (x, y, pixel) in gray.enumerate_pixels() {
out.put_pixel(x, y, Luma([lut[pixel.0[0] as usize]]));
}
DynamicImage::ImageLuma8(out)
}
_ => {
// Non-luma inputs: apply the luminance-derived LUT to each channel.
let rgb = img.to_rgb8();
let mut out = RgbImage::new(w, h);
for (x, y, pixel) in rgb.enumerate_pixels() {
out.put_pixel(x, y, Rgb([
lut[pixel.0[0] as usize],
lut[pixel.0[1] as usize],
lut[pixel.0[2] as usize],
]));
}
DynamicImage::ImageRgb8(out)
}
}
}
/// Heuristic webtoon detection: true when every image is a tall vertical
/// strip (height more than 3x its width).
///
/// Images whose dimensions cannot be read count as "not a strip", and an
/// empty list is never a webtoon.
pub fn detect_webtoon(images: &[PathBuf]) -> bool {
    if images.is_empty() {
        return false;
    }
    images.iter().all(|path| {
        image::image_dimensions(path)
            // saturating_mul avoids a debug-build overflow panic for
            // pathologically wide images (w > u32::MAX / 3).
            .map(|(w, h)| h > w.saturating_mul(3))
            .unwrap_or(false)
    })
}
/// Prepares webtoon (vertical strip) input: loads all images, merges them
/// into one tall strip (chunked when the merged height would exceed
/// `options.max_height`), re-splits the strip into device-height pages at
/// panel gutters, and saves the pages as PNGs in a temp directory.
///
/// Returns the ordered page paths, which replace the original source list.
/// NOTE(review): the `kindling_webtoon_*` temp dir is not removed by this
/// function and no caller cleanup is visible in this file - confirm it is
/// reclaimed somewhere.
fn webtoon_preprocess(
source_images: &[PathBuf],
profile: &DeviceProfile,
options: &ComicOptions,
) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
// Load everything up front; unreadable or empty images are skipped.
let mut images: Vec<DynamicImage> = Vec::new();
for p in source_images {
match image::open(p) {
Ok(img) => {
let (w, h) = img.dimensions();
if w == 0 || h == 0 {
eprintln!("Warning: skipping {} (zero dimensions {}x{})", p.display(), w, h);
continue;
}
images.push(img);
}
Err(e) => {
eprintln!("Warning: skipping {} ({})", p.display(), e);
}
}
}
if images.is_empty() {
return Err("All images failed to load - no valid images to process".into());
}
// Greedy chunking: keep appending images until the next one would push the
// chunk past max_height, then start a new chunk.
let total_height: u32 = images.iter().map(|img| img.height()).sum();
let max_height = options.max_height;
let chunks: Vec<Vec<DynamicImage>> = if total_height > max_height {
eprintln!(
"Warning: merged strip height ({}) exceeds --max-height ({}), splitting into chunks",
total_height, max_height
);
let mut chunks = Vec::new();
let mut current_chunk: Vec<DynamicImage> = Vec::new();
let mut current_height: u32 = 0;
for img in images {
let h = img.height();
if !current_chunk.is_empty() && current_height + h > max_height {
chunks.push(std::mem::take(&mut current_chunk));
current_height = 0;
}
current_height += h;
current_chunk.push(img);
}
if !current_chunk.is_empty() {
chunks.push(current_chunk);
}
eprintln!("Processing {} chunks", chunks.len());
chunks
} else {
vec![images]
};
// Unique-ish temp dir keyed by wall-clock millis.
let temp_dir = std::env::temp_dir().join(format!(
"kindling_webtoon_{}",
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_millis()
));
fs::create_dir_all(&temp_dir)?;
let mut all_paths: Vec<PathBuf> = Vec::new();
// Global page counter so file names stay sequential across chunks.
let mut page_offset = 0usize;
for (chunk_idx, chunk_images) in chunks.iter().enumerate() {
let strip = webtoon_merge(chunk_images);
let (strip_w, strip_h) = strip.dimensions();
if chunks.len() > 1 {
eprintln!("Chunk {}: merged strip {}x{}", chunk_idx + 1, strip_w, strip_h);
} else {
eprintln!("Merged webtoon strip: {}x{}", strip_w, strip_h);
}
let pages = webtoon_split(&strip, profile.height);
if chunks.len() > 1 {
eprintln!("Chunk {}: split into {} pages", chunk_idx + 1, pages.len());
} else {
eprintln!("Split into {} page images", pages.len());
}
// Save pages in parallel; zero-padded names keep natural order stable.
let offset = page_offset;
let paths: Vec<PathBuf> = pages
.into_par_iter()
.enumerate()
.map(|(i, page)| {
let path = temp_dir.join(format!("page_{:04}.png", offset + i));
page.save(&path).expect("Failed to save webtoon page");
path
})
.collect();
let mut paths = paths;
paths.sort_by(|a, b| natural_sort_key(a).cmp(&natural_sort_key(b)));
page_offset += paths.len();
all_paths.extend(paths);
}
Ok(all_paths)
}
/// Stacks images vertically into a single strip as wide as the widest input,
/// each image horizontally centered over a background color sampled from the
/// first image.
pub fn webtoon_merge(images: &[DynamicImage]) -> DynamicImage {
    if images.len() == 1 {
        return images[0].clone();
    }
    let canvas_w = images.iter().map(|img| img.width()).max().unwrap_or(0);
    let canvas_h: u32 = images.iter().map(|img| img.height()).sum();
    let fill = detect_background_color(&images[0]);
    let mut canvas = RgbImage::from_pixel(canvas_w, canvas_h, fill);
    let mut cursor_y = 0u32;
    for img in images {
        let tile = img.to_rgb8();
        // Center narrower images; the widest image sits flush at x = 0.
        let x0 = (canvas_w - tile.width()) / 2;
        for (px, py, pixel) in tile.enumerate_pixels() {
            canvas.put_pixel(x0 + px, cursor_y + py, *pixel);
        }
        cursor_y += tile.height();
    }
    DynamicImage::ImageRgb8(canvas)
}
/// Picks a fill color for webtoon merging: samples the first and last pixel
/// rows and snaps their mean brightness to pure black or pure white.
/// Empty images default to white.
fn detect_background_color(img: &DynamicImage) -> Rgb<u8> {
    let rgb = img.to_rgb8();
    let (w, h) = rgb.dimensions();
    if w == 0 || h == 0 {
        return Rgb([255, 255, 255]);
    }
    let mut channel_sums = [0u64; 3];
    let mut samples: u64 = 0;
    // Note: for a 1-pixel-tall image both rows are row 0, sampled twice,
    // which leaves the average unchanged.
    for &row in &[0, h - 1] {
        for x in 0..w {
            let px = rgb.get_pixel(x, row).0;
            for (sum, &chan) in channel_sums.iter_mut().zip(px.iter()) {
                *sum += u64::from(chan);
            }
            samples += 1;
        }
    }
    if samples == 0 {
        return Rgb([255, 255, 255]);
    }
    let avg = channel_sums.map(|sum| (sum / samples) as u8);
    let luminance: u32 = avg.iter().map(|&c| u32::from(c)).sum::<u32>() / 3;
    if luminance < 128 {
        Rgb([0, 0, 0])
    } else {
        Rgb([255, 255, 255])
    }
}
/// Splits a tall strip into pages of roughly `device_height`, preferring to
/// cut at panel gutters (rows of low pixel variance).
///
/// The cut is searched within +/-20% of the target height. When no gutter is
/// found there, the cut lands at the window midpoint and the next page
/// restarts ~10% of a page higher, repeating that content as overlap so a
/// panel split by the forced cut is not lost.
pub fn webtoon_split(strip: &DynamicImage, device_height: u32) -> Vec<DynamicImage> {
let (w, h) = strip.dimensions();
// Already fits on one screen.
if h <= device_height {
return vec![strip.clone()];
}
let gray = strip.to_luma8();
let target = device_height;
let margin = (target as f64 * 0.20) as u32;
let overlap = (target as f64 * 0.10) as u32;
let mut pages = Vec::new();
let mut y_start = 0u32;
while y_start < h {
let remaining = h - y_start;
// Tail short enough to be the final page: take it whole and stop.
if remaining <= target + margin {
pages.push(strip.crop_imm(0, y_start, w, remaining));
break;
}
let search_lo = target.saturating_sub(margin);
let search_hi = (target + margin).min(remaining);
let (best_y, gutter_found) = find_best_gutter(&gray, y_start, search_lo, search_hi, w);
let cut_y = y_start + best_y;
pages.push(strip.crop_imm(0, y_start, w, best_y));
if gutter_found {
y_start = cut_y;
} else {
// Forced cut: back up so the next page overlaps the seam.
y_start = cut_y.saturating_sub(overlap);
}
}
pages
}
/// Searches cut offsets `[lo, hi]` below `y_start` for the flattest row.
///
/// Each candidate row is scored by the mean pixel variance over a 5-row
/// window centered on it. Returns `(best_offset, true)` when the winner's
/// score is at most 100.0 (a believable gutter), otherwise
/// `(midpoint of the range, false)` to signal a forced cut.
fn find_best_gutter(
gray: &GrayImage,
y_start: u32,
lo: u32,
hi: u32,
width: u32,
) -> (u32, bool) {
let target_mid = (lo + hi) / 2;
let mut best_offset = target_mid;
let mut best_score = f64::MAX;
let img_height = gray.height();
// Window half-size: 2 rows above + 2 below + the candidate itself.
let half_window: u32 = 2;
for offset in lo..=hi {
let y = y_start + offset;
if y >= img_height {
break;
}
let win_lo = y.saturating_sub(half_window);
let win_hi = (y + half_window + 1).min(img_height);
let mut var_sum = 0.0;
let mut count = 0u32;
for wy in win_lo..win_hi {
var_sum += row_variance(gray, wy, width);
count += 1;
}
let avg_variance = if count > 0 { var_sum / count as f64 } else { f64::MAX };
if avg_variance < best_score {
best_score = avg_variance;
best_offset = offset;
}
}
// Even the best candidate is too busy to be a gutter.
if best_score > 100.0 {
return (target_mid, false);
}
(best_offset, true)
}
/// Population variance of pixel values in row `y` (first `width` columns),
/// computed as E[x^2] - E[x]^2. Zero-width rows score 0.
fn row_variance(gray: &GrayImage, y: u32, width: u32) -> f64 {
    if width == 0 {
        return 0.0;
    }
    let mut total = 0.0f64;
    let mut total_sq = 0.0f64;
    for x in 0..width {
        let value = f64::from(gray.get_pixel(x, y).0[0]);
        total += value;
        total_sq += value * value;
    }
    let n = f64::from(width);
    let mean = total / n;
    total_sq / n - mean * mean
}
/// A detected comic panel, expressed as percentages (0-100) of the page
/// dimensions: (x, y) is the top-left corner, (w, h) the size.
#[derive(Debug, Clone, PartialEq)]
pub struct PanelRect {
pub x: f64,
pub y: f64,
pub w: f64,
pub h: f64,
}
/// Detects rectangular panels on a page for Kindle Panel View.
///
/// Strategy: find low-variance horizontal gutters to split the page into row
/// strips, then split each strip on vertical gutters. When no horizontal
/// split exists, a single vertical-only pass is tried instead. Coordinates
/// are percentages (0-100) of the page size. Returns an empty list when
/// fewer than two panels are found (page treated as one big panel).
pub fn detect_panels(img: &DynamicImage) -> Vec<PanelRect> {
let gray = img.to_luma8();
let (w, h) = gray.dimensions();
// Too small for meaningful gutter detection.
if w < 20 || h < 20 {
return Vec::new();
}
let variance_threshold: f64 = 50.0;
// Gutters must be at least 0.5% of the page (min 2 px) to count.
let min_gutter_height = ((h as f64) * 0.005).max(2.0) as u32; let min_gutter_width = ((w as f64) * 0.005).max(2.0) as u32;
let h_gutters = find_horizontal_gutters(&gray, w, h, variance_threshold, min_gutter_height);
let h_strips = strips_from_gutters(&h_gutters, h);
// No horizontal split: fall back to full-height vertical panels.
if h_strips.len() <= 1 {
let v_gutters = find_vertical_gutters(&gray, 0, h, w, variance_threshold, min_gutter_width);
let v_strips = strips_from_gutters(&v_gutters, w);
if v_strips.len() <= 1 {
return Vec::new();
}
return v_strips
.iter()
.map(|&(x_start, x_end)| PanelRect {
x: (x_start as f64 / w as f64) * 100.0,
y: 0.0,
w: ((x_end - x_start) as f64 / w as f64) * 100.0,
h: 100.0,
})
.collect();
}
// Grid pass: split each horizontal strip on its own vertical gutters.
let mut panels = Vec::new();
for &(y_start, y_end) in &h_strips {
let v_gutters = find_vertical_gutters(
&gray, y_start, y_end, w, variance_threshold, min_gutter_width,
);
let v_strips = strips_from_gutters(&v_gutters, w);
for &(x_start, x_end) in &v_strips {
panels.push(PanelRect {
x: (x_start as f64 / w as f64) * 100.0,
y: (y_start as f64 / h as f64) * 100.0,
w: ((x_end - x_start) as f64 / w as f64) * 100.0,
h: ((y_end - y_start) as f64 / h as f64) * 100.0,
});
}
}
// A single panel is no better than showing the whole page.
if panels.len() <= 1 {
return Vec::new();
}
panels
}
/// Resolve the effective panel reading order.
///
/// Recognized explicit values: "horizontal-lr", "horizontal-rl",
/// "vertical-lr", "vertical-rl". Any other explicit value prints a warning
/// and falls back to the page-direction default; `None` uses the default
/// silently ("horizontal-rl" for RTL books, "horizontal-lr" otherwise).
pub fn resolve_panel_reading_order(explicit: Option<&str>, rtl: bool) -> &'static str {
    let fallback = if rtl { "horizontal-rl" } else { "horizontal-lr" };
    match explicit {
        None => fallback,
        Some("horizontal-lr") => "horizontal-lr",
        Some("horizontal-rl") => "horizontal-rl",
        Some("vertical-lr") => "vertical-lr",
        Some("vertical-rl") => "vertical-rl",
        Some(unknown) => {
            eprintln!("Warning: unknown panel-reading-order '{}', using default", unknown);
            fallback
        }
    }
}
/// Sort panels in place to follow the given reading order.
///
/// Panels are grouped into coarse rows (or columns) by flooring their
/// primary coordinate into 5%-wide bands, then ordered within each band by
/// the secondary coordinate. Unknown orders leave the list untouched
/// (callers obtain `reading_order` from `resolve_panel_reading_order`,
/// which only yields the four handled variants).
///
/// Takes `&mut [PanelRect]` instead of `&mut Vec<PanelRect>` (clippy
/// `ptr_arg`); existing `&mut vec` call sites coerce automatically.
pub fn sort_panels_by_reading_order(panels: &mut [PanelRect], reading_order: &str) {
    if panels.len() <= 1 {
        return;
    }
    // Panels whose primary coordinate falls in the same 5% band are treated
    // as the same row/column.
    let tolerance = 5.0;
    let band = |v: f64| (v / tolerance).floor() as i64;
    // NaN-safe float comparison (percentages are finite in practice).
    let cmp_f64 = |a: f64, b: f64| a.partial_cmp(&b).unwrap_or(std::cmp::Ordering::Equal);
    match reading_order {
        "horizontal-lr" => {
            panels.sort_by(|a, b| band(a.y).cmp(&band(b.y)).then_with(|| cmp_f64(a.x, b.x)));
        }
        "horizontal-rl" => {
            panels.sort_by(|a, b| band(a.y).cmp(&band(b.y)).then_with(|| cmp_f64(b.x, a.x)));
        }
        "vertical-lr" => {
            panels.sort_by(|a, b| band(a.x).cmp(&band(b.x)).then_with(|| cmp_f64(a.y, b.y)));
        }
        "vertical-rl" => {
            panels.sort_by(|a, b| band(b.x).cmp(&band(a.x)).then_with(|| cmp_f64(a.y, b.y)));
        }
        _ => {}
    }
}
/// Collect `(start, end)` row ranges whose variance stays below
/// `variance_threshold` for at least `min_gutter_height` consecutive rows.
/// A low-variance run touching the bottom edge is closed at `height`.
fn find_horizontal_gutters(
    gray: &GrayImage,
    width: u32,
    height: u32,
    variance_threshold: f64,
    min_gutter_height: u32,
) -> Vec<(u32, u32)> {
    let mut runs = Vec::new();
    // Start row of the currently open low-variance run, if any.
    let mut open_run: Option<u32> = None;
    for y in 0..height {
        let is_flat = row_variance(gray, y, width) < variance_threshold;
        match (is_flat, open_run) {
            // Flat row begins a new candidate gutter.
            (true, None) => open_run = Some(y),
            // Content row closes an open run; keep it only if tall enough.
            (false, Some(begin)) => {
                if y - begin >= min_gutter_height {
                    runs.push((begin, y));
                }
                open_run = None;
            }
            _ => {}
        }
    }
    if let Some(begin) = open_run {
        if height - begin >= min_gutter_height {
            runs.push((begin, height));
        }
    }
    runs
}
/// Collect `(start, end)` column ranges inside the row band
/// [y_start, y_end) whose variance stays below `variance_threshold` for at
/// least `min_gutter_width` consecutive columns.
///
/// Returns an empty list for a zero-height band.
fn find_vertical_gutters(
gray: &GrayImage,
y_start: u32,
y_end: u32,
width: u32,
variance_threshold: f64,
min_gutter_width: u32,
) -> Vec<(u32, u32)> {
let mut gutters = Vec::new();
// Start column of the currently open low-variance run, if any.
let mut gutter_start: Option<u32> = None;
let strip_height = y_end - y_start;
if strip_height == 0 {
return gutters;
}
for x in 0..width {
let var = col_variance_range(gray, x, y_start, y_end);
if var < variance_threshold {
// Flat column: start (or continue) a candidate gutter run.
if gutter_start.is_none() {
gutter_start = Some(x);
}
} else {
// Content column: close any open run; keep it only if wide enough.
if let Some(start) = gutter_start {
let run_len = x - start;
if run_len >= min_gutter_width {
gutters.push((start, x));
}
gutter_start = None;
}
}
}
// A gutter touching the right edge is closed at `width`.
if let Some(start) = gutter_start {
let run_len = width - start;
if run_len >= min_gutter_width {
gutters.push((start, width));
}
}
gutters
}
/// Population variance of the pixels in column `x`, restricted to rows
/// [y_start, y_end). An empty range yields 0.0 so callers never divide by
/// zero.
fn col_variance_range(gray: &GrayImage, x: u32, y_start: u32, y_end: u32) -> f64 {
let n = (y_end - y_start) as f64;
if n <= 0.0 {
return 0.0;
}
let mut sum: f64 = 0.0;
let mut sum_sq: f64 = 0.0;
for y in y_start..y_end {
let v = gray.get_pixel(x, y).0[0] as f64;
sum += v;
sum_sq += v * v;
}
// E[v^2] - E[v]^2.
let mean = sum / n;
(sum_sq / n) - (mean * mean)
}
/// Invert a sorted gutter list into the content strips covering
/// [0, total_size). With no gutters the whole extent is a single strip;
/// gutters flush against an edge produce no empty strip there.
fn strips_from_gutters(gutters: &[(u32, u32)], total_size: u32) -> Vec<(u32, u32)> {
    if gutters.is_empty() {
        return vec![(0, total_size)];
    }
    let mut cursor = 0u32;
    let mut strips: Vec<(u32, u32)> = gutters
        .iter()
        .filter_map(|&(g_begin, g_end)| {
            // Content lies between the cursor and the next gutter start.
            let piece = if g_begin > cursor { Some((cursor, g_begin)) } else { None };
            cursor = g_end;
            piece
        })
        .collect();
    // Trailing content after the final gutter.
    if cursor < total_size {
        strips.push((cursor, total_size));
    }
    strips
}
/// Run panel detection over every page in parallel (rayon).
/// Yields `None` for pages whose JPEG fails to decode or that have no
/// detectable panels, keeping the output aligned with the input order.
fn detect_panels_for_pages(pages: &[ProcessedImage]) -> Vec<Option<Vec<PanelRect>>> {
    pages
        .par_iter()
        .map(|page| match image::load_from_memory(&page.jpeg_data) {
            Ok(decoded) => Some(detect_panels(&decoded)).filter(|found| !found.is_empty()),
            Err(_) => None,
        })
        .collect()
}
/// Locate and parse a ComicInfo.xml for this input.
///
/// Looks first in the extracted CBZ temp dir (when present), then in the
/// input directory itself; each lookup prefers an exact "ComicInfo.xml"
/// and falls back to a case-insensitive directory scan. Unlike the earlier
/// version, a file that exists but fails to parse in the temp dir no
/// longer aborts the search — the input directory is still consulted.
/// Returns `None` when nothing parseable is found.
fn find_and_parse_comic_info(
    input: &Path,
    cbz_temp_dir: Option<&Path>,
) -> Option<ComicMetadata> {
    if let Some(temp_dir) = cbz_temp_dir {
        if let Some(meta) = comic_info_in_dir(temp_dir) {
            return Some(meta);
        }
    }
    if input.is_dir() {
        if let Some(meta) = comic_info_in_dir(input) {
            return Some(meta);
        }
    }
    None
}
/// Find and parse a ComicInfo.xml inside `dir`: exact filename first, then
/// a case-insensitive scan of the directory entries.
fn comic_info_in_dir(dir: &Path) -> Option<ComicMetadata> {
    let exact = dir.join("ComicInfo.xml");
    if exact.exists() {
        if let Ok(meta) = parse_comic_info(&exact) {
            return Some(meta);
        }
    }
    if let Ok(entries) = fs::read_dir(dir) {
        for entry in entries.flatten() {
            if entry.file_name().to_string_lossy().to_lowercase() == "comicinfo.xml" {
                if let Ok(meta) = parse_comic_info(&entry.path()) {
                    return Some(meta);
                }
            }
        }
    }
    None
}
/// Read the file at `path` and parse it as ComicInfo.xml metadata.
/// Fails if the file cannot be read as UTF-8 text.
pub fn parse_comic_info(path: &Path) -> Result<ComicMetadata, Box<dyn std::error::Error>> {
    let xml = fs::read_to_string(path)?;
    parse_comic_info_xml(&xml)
}
/// Parse ComicInfo.xml content into `ComicMetadata`.
///
/// Tag names are matched case-insensitively (files in the wild mix
/// "Writer" and "writer"). Writer/penciller/inker fields accept
/// comma-separated name lists. XML errors are reported as a warning and
/// stop the scan; whatever was collected up to that point is returned,
/// so the result is always `Ok`.
pub fn parse_comic_info_xml(xml: &str) -> Result<ComicMetadata, Box<dyn std::error::Error>> {
use quick_xml::events::Event;
use quick_xml::Reader;
let mut reader = Reader::from_str(xml);
let mut metadata = ComicMetadata::default();
// Name of the most recently opened element; text events are attributed to it.
let mut current_tag = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event_into(&mut buf) {
Ok(Event::Start(ref e)) => {
current_tag = String::from_utf8_lossy(e.name().as_ref()).to_string();
}
Ok(Event::Text(ref e)) => {
let text = e.unescape().unwrap_or_default().to_string();
let text = text.trim().to_string();
if text.is_empty() {
continue;
}
// Case-insensitive dispatch on the enclosing tag name.
match current_tag.to_ascii_lowercase().as_str() {
"title" => metadata.title = Some(text),
"series" => metadata.series = Some(text),
"number" => metadata.number = Some(text),
"writer" => {
// Comma-separated list of names; blanks are dropped.
for name in text.split(',') {
let name = name.trim().to_string();
if !name.is_empty() {
metadata.writers.push(name);
}
}
}
"penciller" => {
for name in text.split(',') {
let name = name.trim().to_string();
if !name.is_empty() {
metadata.pencillers.push(name);
}
}
}
"inker" => {
for name in text.split(',') {
let name = name.trim().to_string();
if !name.is_empty() {
metadata.inkers.push(name);
}
}
}
"summary" => metadata.summary = Some(text),
"manga" => {
// Both "Yes" and "YesAndRightToLeft" flag RTL reading here.
if text.eq_ignore_ascii_case("YesAndRightToLeft")
|| text.eq_ignore_ascii_case("Yes")
{
metadata.manga_rtl = true;
}
}
"languageiso" => metadata.language = Some(text),
"year" => metadata.year = Some(text),
"month" => metadata.month = Some(text),
_ => {}
}
}
Ok(Event::End(_)) => {
current_tag.clear();
}
Ok(Event::Eof) => break,
Err(e) => {
// Malformed XML: warn and keep whatever was parsed so far.
eprintln!("Warning: error parsing ComicInfo.xml: {}", e);
break;
}
_ => {}
}
buf.clear();
}
if metadata.title.is_some() || metadata.series.is_some() {
eprintln!("Parsed ComicInfo.xml: {}", metadata.effective_title().unwrap_or_default());
}
if metadata.manga_rtl {
eprintln!("ComicInfo.xml specifies manga (RTL) reading direction");
}
Ok(metadata)
}
/// Create a fresh hidden working directory next to the output file, named
/// `.kindling_comic_<output-stem>`. Any leftover directory from a previous
/// run is removed first, so the caller always starts from a clean tree.
fn create_temp_dir(output: &Path) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let dir_name = format!(
        ".kindling_comic_{}",
        output.file_stem().unwrap_or_default().to_string_lossy()
    );
    let work_dir = output.parent().unwrap_or_else(|| Path::new(".")).join(dir_name);
    if work_dir.exists() {
        fs::remove_dir_all(&work_dir)?;
    }
    fs::create_dir_all(&work_dir)?;
    Ok(work_dir)
}
/// Assemble the fixed-layout EPUB working tree inside `temp_dir` — page
/// JPEGs, per-page XHTML, CSS, OPF and NCX — and return the OPF path.
///
/// When `options.cover` names an external file, it is decoded, optionally
/// passed through `cover_fill_crop` (when `cover_fill` is set), resized to
/// the device resolution, and stored as images/cover_override.jpg.
fn write_fixed_layout_epub_v2(
temp_dir: &Path,
pages: &[ProcessedImage],
profile: &DeviceProfile,
rtl: bool,
metadata: Option<&ComicMetadata>,
panel_view: bool,
options: &ComicOptions,
) -> Result<PathBuf, Box<dyn std::error::Error>> {
let images_dir = temp_dir.join("images");
fs::create_dir_all(&images_dir)?;
// One JPEG per page, named by zero-padded page index.
for page in pages {
let filename = format!("page_{:04}.jpg", page.index);
fs::write(images_dir.join(&filename), &page.jpeg_data)?;
}
// Panel-view metadata is only emitted if at least one page has panels.
let any_panels = panel_view && pages.iter().any(|p| p.panels.is_some());
for page in pages {
let xhtml = build_page_xhtml(page.index, page.width, page.height, page.panels.as_deref(), options.kindlegen_parity);
let filename = format!("page_{:04}.xhtml", page.index);
fs::write(temp_dir.join(&filename), xhtml.as_bytes())?;
}
// Zero-margin stylesheet shared by all pages.
let css = "@page {\nmargin: 0;\n}\nbody {\ndisplay: block;\nmargin: 0;\npadding: 0;\n}\n";
fs::write(temp_dir.join("comic.css"), css.as_bytes())?;
// External cover override: decode, optionally fill-crop, resize, write.
let external_cover_id = if let Some(CoverSource::FilePath(ref path)) = options.cover {
let cover_filename = "cover_override.jpg";
let cover_data = fs::read(path).map_err(|e| {
format!("Could not read cover image {}: {}", path.display(), e)
})?;
let cover_img = image::load_from_memory(&cover_data).map_err(|e| {
format!("Could not decode cover image {}: {}", path.display(), e)
})?;
let cover_img = if options.cover_fill {
cover_fill_crop(&cover_img, profile.width, profile.height)
} else {
cover_img
};
let cover_resized = cover_img.resize(profile.width, profile.height, FilterType::Lanczos3);
let cover_jpeg = encode_jpeg(&cover_resized, options.jpeg_quality)?;
fs::write(images_dir.join(cover_filename), &cover_jpeg)?;
Some(cover_filename.to_string())
} else {
None
};
// Unique-enough book id derived from the current Unix timestamp.
let uid = format!(
"kindling-comic-{}",
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs()
);
let opf = build_comic_opf_v2(pages.len(), profile, rtl, metadata, any_panels, options, external_cover_id.as_deref(), &uid);
let opf_path = temp_dir.join("content.opf");
fs::write(&opf_path, opf.as_bytes())?;
let ncx = build_comic_ncx(pages.len(), &uid);
fs::write(temp_dir.join("toc.ncx"), ncx.as_bytes())?;
Ok(opf_path)
}
/// Build the OPF package document for the fixed-layout comic.
///
/// Emits a manifest (NCX, CSS, one XHTML + one JPEG item per page), a
/// spine with page-progression-direction, and Kindle fixed-layout
/// metadata. Cover precedence: external cover file, then an explicit page
/// number (1-based; out-of-range falls back to the first page with a
/// warning), then the first page.
fn build_comic_opf_v2(
num_pages: usize,
profile: &DeviceProfile,
rtl: bool,
metadata: Option<&ComicMetadata>,
panel_view: bool,
options: &ComicOptions,
external_cover_filename: Option<&str>,
uid: &str,
) -> String {
let mut manifest_items = String::new();
let mut spine_items = String::new();
manifest_items.push_str(" <item id=\"ncx\" href=\"toc.ncx\" media-type=\"application/x-dtbncx+xml\"/>\n");
manifest_items.push_str(" <item id=\"css\" href=\"comic.css\" media-type=\"text/css\"/>\n");
// pageNNNN (XHTML) + imgNNNN (JPEG) manifest pairs, spine in page order.
for i in 0..num_pages {
manifest_items.push_str(&format!(
" <item id=\"page{:04}\" href=\"page_{:04}.xhtml\" media-type=\"application/xhtml+xml\"/>\n",
i, i
));
manifest_items.push_str(&format!(
" <item id=\"img{:04}\" href=\"images/page_{:04}.jpg\" media-type=\"image/jpeg\"/>\n",
i, i
));
spine_items.push_str(&format!(
" <itemref idref=\"page{:04}\"/>\n",
i
));
}
// Cover <meta> plus, for an external cover, its extra manifest entry.
let (cover_meta, cover_manifest_entry) = if let Some(ref cover_filename) = external_cover_filename {
let entry = format!(
" <item id=\"cover_img\" href=\"images/{}\" media-type=\"image/jpeg\"/>\n",
cover_filename
);
(" <meta name=\"cover\" content=\"cover_img\"/>\n".to_string(), entry)
} else if let Some(CoverSource::PageNumber(page_num)) = options.cover {
// --cover page numbers are 1-based; manifest ids are 0-based.
let idx = page_num.saturating_sub(1);
if idx < num_pages {
(format!(" <meta name=\"cover\" content=\"img{:04}\"/>\n", idx), String::new())
} else {
eprintln!("Warning: cover page {} exceeds page count {}, using first page", page_num, num_pages);
if num_pages > 0 {
(" <meta name=\"cover\" content=\"img0000\"/>\n".to_string(), String::new())
} else {
(String::new(), String::new())
}
}
} else if num_pages > 0 {
(" <meta name=\"cover\" content=\"img0000\"/>\n".to_string(), String::new())
} else {
(String::new(), String::new())
};
// Title: CLI override beats ComicInfo metadata beats "Comic".
let title = if let Some(ref t) = options.title_override {
t.clone()
} else {
metadata
.and_then(|m| m.effective_title())
.unwrap_or_else(|| "Comic".to_string())
};
let mut creator_entries = String::new();
// Creator: CLI override beats ComicInfo creators beats DEFAULT_AUTHOR.
let creator_value: String = if let Some(ref author) = options.author_override {
author.clone()
} else if let Some(meta) = metadata {
let creators = meta.creators();
if creators.is_empty() {
DEFAULT_AUTHOR.to_string()
} else {
creators.join(", ")
}
} else {
DEFAULT_AUTHOR.to_string()
};
creator_entries.push_str(&format!(
" <dc:creator>{}</dc:creator>\n",
escape_xml(&creator_value)
));
// Language: CLI override beats ComicInfo LanguageISO beats "en".
let language = options
.language
.as_deref()
.or_else(|| metadata.and_then(|m| m.language.as_deref()))
.unwrap_or("en");
let mut description_entry = String::new();
if let Some(meta) = metadata {
if let Some(ref summary) = meta.summary {
description_entry = format!(
" <dc:description>{}</dc:description>\n",
escape_xml(summary)
);
}
}
let ppd = if rtl { "rtl" } else { "ltr" };
let writing_mode_meta = if rtl {
" <meta name=\"writing-mode\" content=\"horizontal-rl\"/>\n"
} else {
""
};
// Kindle panel-view ("region magnification") flags.
let panel_view_meta = if panel_view {
" <meta name=\"book-type\" content=\"comic\"/>\n <meta name=\"region-mag\" content=\"true\"/>\n"
} else {
""
};
format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<package version="3.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="uid">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
<dc:title>{title}</dc:title>
<dc:language>{language}</dc:language>
<dc:identifier id="uid">{uid}</dc:identifier>
{creator_entries}{description_entry} <meta name="fixed-layout" content="true"/>
<meta name="original-resolution" content="{width}x{height}"/>
<meta property="rendition:layout">pre-paginated</meta>
<meta property="rendition:orientation">auto</meta>
{writing_mode_meta}{panel_view_meta}{cover_meta} </metadata>
<manifest>
{cover_manifest_entry}{manifest_items} </manifest>
<spine toc="ncx" page-progression-direction="{ppd}">
{spine_items} </spine>
</package>
"#,
title = escape_xml(&title),
language = escape_xml(language),
uid = uid,
width = profile.width,
height = profile.height,
cover_meta = cover_meta,
cover_manifest_entry = cover_manifest_entry,
creator_entries = creator_entries,
description_entry = description_entry,
manifest_items = manifest_items,
spine_items = spine_items,
ppd = ppd,
writing_mode_meta = writing_mode_meta,
panel_view_meta = panel_view_meta,
)
}
/// Escape the five XML-reserved characters for safe use in element text
/// and attribute values.
///
/// The previous version replaced each character with itself, so titles or
/// summaries containing `&`, `<`, `>`, `"` or `'` produced malformed OPF
/// XML. `&` must be escaped first so later entities are not double-escaped.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
/// Render the XHTML wrapper document for one page image.
///
/// Two flavors: with `kindlegen_parity` the markup mirrors the legacy
/// kindlegen output (no DOCTYPE, no viewport meta, no stylesheet link);
/// otherwise the page gets a viewport meta sized to the image, the
/// kindle:flow CSS link, and a black body background. Panel rectangles,
/// when present, become absolutely positioned percentage-sized divs used
/// for Kindle panel view.
fn build_page_xhtml(
page_index: usize,
img_width: u32,
img_height: u32,
panels: Option<&[PanelRect]>,
kindlegen_parity: bool,
) -> String {
if kindlegen_parity {
// Compact single-line markup matching kindlegen's output shape.
let panel_divs = match panels {
Some(rects) if !rects.is_empty() => {
let mut divs = String::new();
divs.push_str(r#"<div id="panels" style="position:absolute;top:0;left:0;width:100%;height:100%">"#);
for rect in rects {
divs.push_str(&format!(
r#"<div class="panel" style="position:absolute;left:{x:.1}%;top:{y:.1}%;width:{w:.1}%;height:{h:.1}%"></div>"#,
x = rect.x,
y = rect.y,
w = rect.w,
h = rect.h,
));
}
divs.push_str("</div>");
divs
}
_ => String::new(),
};
return format!(
r#"<?xml version="1.0" encoding="UTF-8"?><html xmlns="http://www.w3.org/1999/xhtml"><head><title>Page {page_num}</title></head><body><div><img src="images/page_{index:04}.jpg" alt="page {page_num}" width="{w}" height="{h}"/></div>{panel_divs}</body></html>"#,
page_num = page_index + 1,
index = page_index,
w = img_width,
h = img_height,
panel_divs = panel_divs,
);
}
// Default flavor: indented markup with viewport and stylesheet link.
let panel_divs = match panels {
Some(rects) if !rects.is_empty() => {
let mut divs = String::new();
divs.push_str(" <div id=\"panels\" style=\"position:absolute;top:0;left:0;width:100%;height:100%\">\n");
for rect in rects {
divs.push_str(&format!(
" <div class=\"panel\" style=\"position:absolute;left:{x:.1}%;top:{y:.1}%;width:{w:.1}%;height:{h:.1}%\"></div>\n",
x = rect.x,
y = rect.y,
w = rect.w,
h = rect.h,
));
}
divs.push_str(" </div>\n");
divs
}
_ => String::new(),
};
format!(
r#"<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml"><head>
<title>Page {page_num}</title>
<link href="kindle:flow:0001?mime=text/css" type="text/css" rel="stylesheet"/>
<meta name="viewport" content="width={w}, height={h}"/>
</head><body style="background-color:#000000;">
<div style="text-align:center;">
<img width="{w}" height="{h}" src="images/page_{index:04}.jpg"/>
</div>
{panel_divs}</body></html>
"#,
page_num = page_index + 1,
index = page_index,
w = img_width,
h = img_height,
panel_divs = panel_divs,
)
}
/// Build the NCX table of contents: one navPoint per page (1-based labels
/// and play order, 0-based file names) plus the dtb metadata header.
fn build_comic_ncx(num_pages: usize, uid: &str) -> String {
    let nav_points: String = (0..num_pages)
        .map(|page| {
            format!(
                r#" <navPoint id="page{index:04}" playOrder="{order}">
<navLabel><text>Page {page_num}</text></navLabel>
<content src="page_{index:04}.xhtml"/>
</navPoint>
"#,
                index = page,
                order = page + 1,
                page_num = page + 1,
            )
        })
        .collect();
    format!(
        r#"<?xml version="1.0" encoding="UTF-8"?>
<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1">
<head>
<meta name="dtb:uid" content="{uid}"/>
<meta name="dtb:depth" content="1"/>
<meta name="dtb:totalPageCount" content="{num_pages}"/>
<meta name="dtb:maxPageNumber" content="{num_pages}"/>
</head>
<docTitle><text>Comic</text></docTitle>
<navMap>
{nav_points} </navMap>
</ncx>
"#,
        num_pages = num_pages,
        nav_points = nav_points,
        uid = uid,
    )
}
#[cfg(test)]
mod tests {
use super::*;
// ComicInfo tags must be matched case-insensitively: this fixture mixes
// "Series"/"LanguageISO" with lowercase "writer".
#[test]
fn test_parse_comic_info_lowercase_tags() {
let xml = r#"<?xml version='1.0' encoding='utf-8'?>
<ComicInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Series>Star Wars: Darth Vader Modern Era Epic Collection: Vader Down</Series>
<LanguageISO>en</LanguageISO>
<PageCount>407</PageCount>
<writer>Kieron Gillen, Jason Aaron</writer>
<Month>11</Month>
<Year>2025</Year>
</ComicInfo>"#;
let meta = parse_comic_info_xml(xml).expect("parse should succeed");
assert_eq!(
meta.series.as_deref(),
Some("Star Wars: Darth Vader Modern Era Epic Collection: Vader Down"),
"Series should be parsed from mixed-case tag"
);
// Comma-separated writer lists must be split into individual names.
assert!(
meta.writers.iter().any(|w| w == "Kieron Gillen"),
"Writers should include Kieron Gillen, got {:?}",
meta.writers,
);
assert!(
meta.writers.iter().any(|w| w == "Jason Aaron"),
"Writers should include Jason Aaron, got {:?}",
meta.writers,
);
assert_eq!(meta.language.as_deref(), Some("en"));
assert_eq!(meta.year.as_deref(), Some("2025"));
assert_eq!(meta.month.as_deref(), Some("11"));
}
// The <manga> value comparison is case-insensitive too.
#[test]
fn test_parse_comic_info_manga_case_insensitive() {
let xml = r#"<?xml version='1.0' encoding='utf-8'?>
<ComicInfo>
<manga>yesandrighttoleft</manga>
</ComicInfo>"#;
let meta = parse_comic_info_xml(xml).expect("parse should succeed");
assert!(meta.manga_rtl, "manga RTL should be detected case-insensitively");
}
// End-to-end check that a representative comic OPF passes the project's
// own validator with zero errors.
#[test]
fn test_comic_opf_passes_validate() {
let dir = std::env::temp_dir().join("kindling_comic_validate_test");
let _ = std::fs::remove_dir_all(&dir);
std::fs::create_dir_all(&dir).unwrap();
// Hand-assembled minimal baseline JPEG (SOF0 declares a 1x1 image) —
// just enough bytes for the manifest item to point at a real file.
let jpeg: Vec<u8> = vec![
0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46, 0x49, 0x46, 0x00, 0x01,
0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xFF, 0xDB, 0x00, 0x43,
0x00, 0x08, 0x06, 0x06, 0x07, 0x06, 0x05, 0x08, 0x07, 0x07, 0x07, 0x09,
0x09, 0x08, 0x0A, 0x0C, 0x14, 0x0D, 0x0C, 0x0B, 0x0B, 0x0C, 0x19, 0x12,
0x13, 0x0F, 0x14, 0x1D, 0x1A, 0x1F, 0x1E, 0x1D, 0x1A, 0x1C, 0x1C, 0x20,
0x24, 0x2E, 0x27, 0x20, 0x22, 0x2C, 0x23, 0x1C, 0x1C, 0x28, 0x37, 0x29,
0x2C, 0x30, 0x31, 0x34, 0x34, 0x34, 0x1F, 0x27, 0x39, 0x3D, 0x38, 0x32,
0x3C, 0x2E, 0x33, 0x34, 0x32, 0xFF, 0xC0, 0x00, 0x0B, 0x08, 0x00, 0x01,
0x00, 0x01, 0x01, 0x01, 0x11, 0x00, 0xFF, 0xC4, 0x00, 0x1F, 0x00, 0x00,
0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x09, 0x0A, 0x0B, 0xFF, 0xC4, 0x00, 0xB5, 0x10, 0x00, 0x02, 0x01, 0x03,
0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D,
0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06,
0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xA1, 0x08,
0x23, 0x42, 0xB1, 0xC1, 0x15, 0x52, 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72,
0x82, 0xFF, 0xDA, 0x00, 0x08, 0x01, 0x01, 0x00, 0x00, 0x3F, 0x00, 0xFB,
0xD0, 0xFF, 0xD9,
];
std::fs::write(dir.join("cover.jpg"), &jpeg).unwrap();
std::fs::write(
dir.join("page.xhtml"),
r#"<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml"><head><title>P</title>
<meta name="viewport" content="width=1072, height=1448"/></head>
<body><div><img src="cover.jpg" alt="p"/></div></body></html>"#,
)
.unwrap();
let opf = r#"<?xml version="1.0" encoding="UTF-8"?>
<package version="3.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="uid">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
<dc:title>Test Comic</dc:title>
<dc:language>en</dc:language>
<dc:identifier id="uid">kindling-comic-test</dc:identifier>
<dc:creator>Unknown</dc:creator>
<meta name="fixed-layout" content="true"/>
<meta name="original-resolution" content="1072x1448"/>
<meta name="cover" content="cover-img"/>
</metadata>
<manifest>
<item id="cover-img" href="cover.jpg" media-type="image/jpeg"/>
<item id="page1" href="page.xhtml" media-type="application/xhtml+xml"/>
</manifest>
<spine>
<itemref idref="page1"/>
</spine>
</package>
"#;
let opf_path = dir.join("content.opf");
std::fs::write(&opf_path, opf).unwrap();
let report = crate::validate::validate_opf(&opf_path)
.expect("validate should parse the OPF");
assert_eq!(
report.error_count(),
0,
"comic OPF should have 0 errors, got: {:?}",
report.findings,
);
let _ = std::fs::remove_dir_all(&dir);
}
}