use kindling::{
comic, epub, extracted::ExtractedEpub, kdp_rules, mobi, mobi_check, mobi_dump, mobi_rewrite,
opf, repair, validate,
};
use std::path::PathBuf;
use std::process;
use clap::{Parser, Subcommand};
// Top-level CLI parser. Only used for the subcommand interface; when the
// first argument looks like a bare .opf/.epub path, main() bypasses clap and
// takes the kindlegen-compatibility path instead (see is_kindlegen_compat_mode).
// NOTE: plain `//` comments are used deliberately — `///` doc comments on clap
// derive items become --help text and would change CLI output.
#[derive(Parser)]
#[command(name = "kindling", about = "Kindle MOBI builder for dictionaries and books", version)]
struct Cli {
    // The selected subcommand (build, comic, validate, repair, ...).
    #[command(subcommand)]
    command: Commands,
}
// All kindling subcommands. Plain `//` comments only: `///` on clap derive
// variants/fields would be picked up as help/about text and change CLI output.
#[derive(Subcommand)]
enum Commands {
    // Build a MOBI/AZW3 book or dictionary from an OPF or EPUB input.
    #[command(version)]
    Build {
        // OPF or EPUB input path.
        input: PathBuf,
        #[arg(short, long)]
        output: Option<PathBuf>,
        #[arg(long)]
        no_compress: bool,
        #[arg(long)]
        headwords_only: bool,
        #[arg(long)]
        no_embed_source: bool,
        #[arg(long)]
        include_cmet: bool,
        #[arg(long)]
        no_hd_images: bool,
        #[arg(long)]
        creator_tag: bool,
        // Force dual-format MOBI7+KF8 output (.mobi) instead of KF8-only.
        #[arg(long)]
        legacy_mobi: bool,
        // Hidden: KF8-only is now the default; kept for compatibility.
        #[arg(long, hide = true)]
        kf8_only: bool,
        // --kindle-limits / --no-kindle-limits are mutually overriding;
        // clap keeps whichever appears last on the command line.
        #[arg(long, overrides_with = "no_kindle_limits")]
        kindle_limits: bool,
        #[arg(long, overrides_with = "kindle_limits")]
        no_kindle_limits: bool,
        #[arg(long)]
        no_validate: bool,
        #[arg(long)]
        no_self_check: bool,
        #[arg(long)]
        strict_accents: bool,
    },
    // Build a fixed-layout comic from an image archive/folder input.
    #[command(version)]
    Comic {
        input: PathBuf,
        #[arg(short, long)]
        output: Option<PathBuf>,
        // Target device profile name (validated against comic::get_profile).
        #[arg(short, long, default_value = "paperwhite")]
        device: String,
        // Right-to-left reading order (manga).
        #[arg(long)]
        rtl: bool,
        #[arg(long)]
        no_split: bool,
        #[arg(long)]
        rotate_spreads: bool,
        // Margin-crop aggressiveness 0..=2 (0 disables cropping).
        #[arg(long, default_value = "2", value_parser = clap::value_parser!(u8).range(0..=2))]
        crop: u8,
        // Hidden alias: forces crop level 0.
        #[arg(long, hide = true)]
        no_crop: bool,
        #[arg(long)]
        no_enhance: bool,
        #[arg(long)]
        webtoon: bool,
        #[arg(long)]
        no_panel_view: bool,
        #[arg(long)]
        panel_reading_order: Option<String>,
        #[arg(long, default_value = "85", value_parser = clap::value_parser!(u8).range(1..=100))]
        jpeg_quality: u8,
        #[arg(long, default_value = "65536")]
        max_height: u32,
        // Source embedding is opt-in for comics (opposite of Build's opt-out).
        #[arg(long)]
        embed_source: bool,
        // Hidden: embedding is already off by default; kept for compatibility.
        #[arg(long, hide = true)]
        no_embed_source: bool,
        // "pdoc" (personal document, default) or "ebok".
        #[arg(long, default_value = "pdoc")]
        doc_type: String,
        #[arg(long)]
        title: Option<String>,
        #[arg(long)]
        author: Option<String>,
        #[arg(long)]
        language: Option<String>,
        // Cover source: a 1-based page number or an image file path.
        #[arg(long)]
        cover: Option<String>,
        #[arg(long)]
        cover_fill: bool,
        #[arg(long)]
        legacy_mobi: bool,
        #[arg(long, hide = true)]
        kf8_only: bool,
        #[arg(long, overrides_with = "no_kindle_limits")]
        kindle_limits: bool,
        #[arg(long, overrides_with = "kindle_limits")]
        no_kindle_limits: bool,
        #[arg(long)]
        no_self_check: bool,
        #[arg(long)]
        kindlegen_parity: bool,
    },
    // Validate an OPF/EPUB against Kindle Publishing Guidelines rules.
    #[command(version)]
    Validate {
        input: PathBuf,
        // With --strict, warnings also cause a non-zero exit.
        #[arg(long)]
        strict: bool,
    },
    // Scan and optionally repair common EPUB defects.
    #[command(version)]
    Repair {
        input: PathBuf,
        #[arg(short, long)]
        output: Option<PathBuf>,
        #[arg(long)]
        report_json: bool,
        // Scan only; write nothing.
        #[arg(long)]
        dry_run: bool,
    },
    // Rewrite EXTH metadata (and optionally the cover) in an existing MOBI.
    #[command(version)]
    RewriteMetadata {
        input: PathBuf,
        #[arg(short, long)]
        output: Option<PathBuf>,
        #[arg(long)]
        report_json: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        title: Option<String>,
        // Repeatable: --author "A" --author "B".
        #[arg(long = "author", action = clap::ArgAction::Append)]
        authors: Vec<String>,
        #[arg(long)]
        publisher: Option<String>,
        #[arg(long)]
        description: Option<String>,
        #[arg(long)]
        language: Option<String>,
        #[arg(long)]
        isbn: Option<String>,
        #[arg(long)]
        asin: Option<String>,
        #[arg(long = "publication-date")]
        publication_date: Option<String>,
        // Repeatable: --subject "X" --subject "Y".
        #[arg(long = "subject", action = clap::ArgAction::Append)]
        subjects: Vec<String>,
        #[arg(long)]
        series: Option<String>,
        #[arg(long = "series-index")]
        series_index: Option<String>,
        #[arg(long)]
        cover: Option<PathBuf>,
    },
    // Dump a human-readable summary of a MOBI/AZW file.
    #[command(version)]
    Dump {
        input: PathBuf,
    },
}
/// Returns true when the process was invoked kindlegen-style, i.e. the first
/// positional argument is a bare `.opf` or `.epub` path rather than a clap
/// subcommand. The extension check is case-insensitive.
fn is_kindlegen_compat_mode() -> bool {
    // `nth(1)` skips argv[0] and avoids collecting the whole argument
    // vector just to look at one element.
    match std::env::args().nth(1) {
        Some(first_arg) => {
            let lower = first_arg.to_lowercase();
            lower.ends_with(".opf") || lower.ends_with(".epub")
        }
        None => false,
    }
}
/// Parses kindlegen-style arguments: `kindling input.epub [-o name] [flags]`.
///
/// Returns `(input, output_name, no_validate, no_self_check)`. Flags that
/// kindlegen accepts but kindling does not need (`-dont_append_source`,
/// `-c0/-c1/-c2`, `-verbose`) and anything unrecognized are silently ignored
/// for compatibility. Assumes argv[1] exists (guaranteed by the caller having
/// checked is_kindlegen_compat_mode()).
fn parse_kindlegen_args() -> (PathBuf, Option<String>, bool, bool) {
    let argv: Vec<String> = std::env::args().collect();
    let input = PathBuf::from(&argv[1]);
    let mut output_name: Option<String> = None;
    let mut no_validate = false;
    let mut no_self_check = false;
    let mut idx = 2;
    while idx < argv.len() {
        let arg = argv[idx].as_str();
        if arg == "-o" {
            // -o takes a value; only consume it if present.
            if idx + 1 < argv.len() {
                output_name = Some(argv[idx + 1].clone());
                idx += 1;
            }
        } else if arg == "-locale" {
            // -locale takes a value we do not use; skip it as well.
            idx += 1;
        } else if arg == "-no_validate" || arg == "--no-validate" {
            no_validate = true;
        } else if arg == "--no-self-check" || arg == "-no_self_check" {
            no_self_check = true;
        }
        // Everything else (known no-ops and unknown flags) falls through.
        idx += 1;
    }
    (input, output_name, no_validate, no_self_check)
}
/// Resolves the final output path for a build.
///
/// An explicit `output` wins; otherwise the input path's extension is
/// replaced with `.azw3` (KF8-only) or `.mobi` (dual-format).
///
/// Takes `&Path` rather than `&PathBuf` (idiomatic, more general); existing
/// `&PathBuf` callers still compile via deref coercion.
fn resolve_output_path(input: &std::path::Path, output: Option<PathBuf>, kf8_only: bool) -> PathBuf {
    output.unwrap_or_else(|| {
        let ext = if kf8_only { "azw3" } else { "mobi" };
        input.with_extension(ext)
    })
}
/// Returns true when the input (an EPUB or a bare OPF) declares itself a
/// dictionary in its OPF metadata. Any extraction or parse failure is
/// treated as "not a dictionary".
fn detect_is_dictionary(input: &std::path::Path) -> bool {
    let epub_input = matches!(
        input.extension(),
        Some(ext) if ext.eq_ignore_ascii_case("epub")
    );
    if !epub_input {
        // Bare OPF: parse it directly.
        return opf::OPFData::parse(input)
            .map(|data| data.is_dictionary())
            .unwrap_or(false);
    }
    // EPUB: unpack to a temp dir, inspect the OPF, then clean up.
    match epub::extract_epub(input) {
        Ok((temp_dir, opf_path)) => {
            let verdict = opf::OPFData::parse(&opf_path)
                .map(|data| data.is_dictionary())
                .unwrap_or(false);
            epub::cleanup_temp_dir(&temp_dir);
            verdict
        }
        Err(_) => false,
    }
}
/// Builds a MOBI/AZW3 from `input` (an EPUB or bare OPF) into `output_path`,
/// then runs a post-build self-check on the result. Emits kindlegen-style
/// status lines (`Error(prcgen):…` / `Info(prcgen):I1036`) on stdout and
/// exits the process with status 1 on any fatal failure.
#[allow(clippy::too_many_arguments)]
fn do_build(
    input: &PathBuf,
    output_path: &PathBuf,
    no_compress: bool,
    headwords_only: bool,
    embed_source: bool,
    include_cmet: bool,
    no_hd_images: bool,
    creator_tag: bool,
    kf8_only: bool,
    kindle_limits: bool,
    no_validate: bool,
    self_check: bool,
    strict_accents: bool,
) {
    let is_epub = input
        .extension()
        .map(|ext| ext.eq_ignore_ascii_case("epub"))
        .unwrap_or(false);
    // SRCS embedding stores the whole EPUB inside the output; only
    // meaningful for EPUB inputs. A read failure downgrades to a warning.
    let srcs_data = if embed_source && is_epub {
        match std::fs::read(input) {
            Ok(data) => {
                eprintln!("SRCS: embedding {} bytes of EPUB source", data.len());
                Some(data)
            }
            Err(e) => {
                eprintln!("Warning: could not read EPUB for SRCS embedding: {}", e);
                None
            }
        }
    } else {
        if embed_source && !is_epub {
            eprintln!("Note: EPUB source embedding skipped for non-EPUB input");
        }
        None
    };
    // (title, author, is_dictionary) captured before the build, used by the
    // post-build metadata check below.
    let mut opf_snapshot: Option<(String, String, bool)> = None;
    // EPUB inputs are unpacked to a temp dir; bare OPF inputs are used as-is
    // (temp_dir stays None, so no cleanup is attempted).
    let (temp_dir, opf_path): (Option<std::path::PathBuf>, std::path::PathBuf) = if is_epub {
        match epub::extract_epub(input) {
            Ok((dir, path)) => (Some(dir), path),
            Err(e) => {
                eprintln!("Error extracting EPUB: {}", e);
                println!("Error(prcgen):E24000: Could not process input file");
                process::exit(1);
            }
        }
    } else {
        (None, input.clone())
    };
    // Preferred path: a fully-extracted in-memory representation. On failure,
    // the OPF-path-based fallbacks below are used instead.
    let extracted: Option<ExtractedEpub> = match ExtractedEpub::from_opf_path(&opf_path) {
        Ok(e) => Some(e),
        Err(_) => None,
    };
    let preflight = match extracted.as_ref() {
        Some(e) => kindling::run_preflight_validation_on_extracted(e, no_validate),
        None => kindling::run_preflight_validation(&opf_path, no_validate),
    };
    if let Err(errors) = preflight {
        // Clean up the temp dir before aborting.
        if let Some(ref dir) = temp_dir {
            epub::cleanup_temp_dir(dir);
        }
        eprintln!(
            "Build aborted: {} validation errors. Run with --no-validate to skip.",
            errors
        );
        println!("Error(prcgen):E24000: Could not build Mobi file");
        process::exit(1);
    }
    if let Some(ref e) = extracted {
        opf_snapshot = Some((
            e.opf.title.clone(),
            e.opf.author.clone(),
            e.opf.is_dictionary(),
        ));
    } else if let Ok(parsed) = opf::OPFData::parse(&opf_path) {
        opf_snapshot = Some((parsed.title.clone(), parsed.author.clone(), parsed.is_dictionary()));
    }
    let result = match extracted.as_ref() {
        Some(e) => mobi::build_mobi_from_extracted(
            e, output_path, no_compress, headwords_only,
            srcs_data.as_deref(), include_cmet, no_hd_images, creator_tag, kf8_only, None, kindle_limits, self_check,
            false, strict_accents,
        ),
        None => mobi::build_mobi(
            &opf_path, output_path, no_compress, headwords_only,
            srcs_data.as_deref(), include_cmet, no_hd_images, creator_tag, kf8_only, None, kindle_limits, self_check,
            false, strict_accents,
        ),
    };
    // The temp dir is no longer needed regardless of build outcome.
    if let Some(ref dir) = temp_dir {
        epub::cleanup_temp_dir(dir);
    }
    match result {
        Ok(()) => {
            let (title, author, is_dictionary) = opf_snapshot
                .as_ref()
                .map(|(t, a, d)| (t.as_str(), a.as_str(), *d))
                .unwrap_or(("", "", false));
            let expected = mobi_check::ExpectedMetadata {
                title: if title.is_empty() { None } else { Some(title) },
                author: if author.is_empty() { None } else { Some(author) },
                is_comic: false,
                is_dictionary,
            };
            // Verify the produced file against the metadata we intended to
            // write. A failing check is fatal; the check not being able to
            // run at all is only a warning.
            match mobi_check::check_mobi_file(output_path, &expected) {
                Ok(report) => {
                    if let Err(e) = mobi_check::report_result(output_path, &report) {
                        eprintln!("Error: {}", e);
                        println!("Error(prcgen):E24000: Could not build Mobi file");
                        process::exit(1);
                    }
                }
                Err(e) => {
                    eprintln!("Warning: MOBI post-build check could not run: {}", e);
                }
            }
            println!("Info(prcgen):I1036: Mobi file built successfully");
        }
        Err(e) => {
            eprintln!("Error: {}", e);
            // Map size-related errors to kindlegen's E23026 code; everything
            // else gets the generic E24000.
            let err_str = format!("{}", e);
            if err_str.contains("too big") || err_str.contains("too large") {
                println!("Error(prcgen):E23026: File too big");
            } else {
                println!("Error(prcgen):E24000: Could not build Mobi file");
            }
            process::exit(1);
        }
    }
}
/// Entry point. Supports two invocation styles: kindlegen-compatible
/// (`kindling book.epub -o out.mobi …`) when the first argument is a bare
/// .opf/.epub path, and the normal clap subcommand interface otherwise.
fn main() {
    if is_kindlegen_compat_mode() {
        let (input, output_name, no_validate, no_self_check) = parse_kindlegen_args();
        // kindlegen's -o takes a file NAME; the file is placed next to the input.
        let output_path = if let Some(name) = output_name {
            let parent = input.parent().unwrap_or(std::path::Path::new("."));
            parent.join(name)
        } else {
            input.with_extension("mobi")
        };
        // Compat-mode defaults: compression on, SRCS embedding on, dual-format
        // output (kf8_only = false), kindle limits enforced.
        // NOTE(review): `-dont_append_source` is parsed but ignored, so
        // embed_source stays true here — confirm that is intentional.
        do_build(&input, &output_path, false, false, true, false, false, false, false, true, no_validate, !no_self_check, false);
    } else {
        let cli = Cli::parse();
        match cli.command {
            Commands::Build {
                input,
                output,
                no_compress,
                headwords_only,
                no_embed_source,
                include_cmet,
                no_hd_images,
                creator_tag,
                legacy_mobi,
                kf8_only,
                kindle_limits,
                no_kindle_limits,
                no_validate,
                no_self_check,
                strict_accents,
            } => {
                // Kindle limits default ON for Build; only --no-kindle-limits
                // turns them off. (The middle `kindle_limits` branch is
                // redundant with the final `true` but kept byte-identical.)
                let effective_kindle_limits = if no_kindle_limits {
                    false
                } else if kindle_limits {
                    true
                } else {
                    true
                };
                if kf8_only {
                    eprintln!(
                        "Note: --kf8-only is now the default for non-dictionary \
                        builds and has no effect. Dictionaries still build as \
                        dual-format MOBI7+KF8 because Kindle's lookup popup \
                        requires the MOBI7 INDX format."
                    );
                }
                // Dictionaries always build dual-format; for everything else,
                // KF8-only (.azw3) is the default unless --legacy-mobi.
                let is_dictionary = detect_is_dictionary(&input);
                let effective_kf8_only = if is_dictionary {
                    if legacy_mobi {
                        eprintln!(
                            "Note: --legacy-mobi is implicit for dictionary \
                            builds (dictionaries always use MOBI7 INDX)."
                        );
                    }
                    false
                } else if legacy_mobi {
                    eprintln!(
                        "Building dual-format MOBI7+KF8 (.mobi). Modern Kindles \
                        prefer KF8-only .azw3. Drop --legacy-mobi to use the \
                        modern default."
                    );
                    false
                } else {
                    true
                };
                let output_path = resolve_output_path(&input, output, effective_kf8_only);
                do_build(&input, &output_path, no_compress, headwords_only, !no_embed_source, include_cmet, no_hd_images, creator_tag, effective_kf8_only, effective_kindle_limits, no_validate, !no_self_check, strict_accents);
            }
            Commands::Comic {
                input,
                output,
                device,
                rtl,
                no_split,
                rotate_spreads,
                crop,
                no_crop,
                no_enhance,
                webtoon,
                no_panel_view,
                jpeg_quality,
                max_height,
                embed_source,
                no_embed_source,
                doc_type,
                title,
                author,
                language,
                cover,
                cover_fill,
                panel_reading_order,
                legacy_mobi,
                kf8_only,
                kindle_limits,
                no_kindle_limits,
                no_self_check,
                kindlegen_parity,
            } => {
                // Resolve the device profile first; an unknown device is fatal.
                let profile = match comic::get_profile(&device) {
                    Some(p) => p,
                    None => {
                        eprintln!("Error: unknown device '{}'. Valid devices: {}", device, comic::valid_device_names());
                        process::exit(1);
                    }
                };
                if kf8_only {
                    eprintln!(
                        "Note: --kf8-only is now the default for comic builds \
                        and has no effect. Pass --legacy-mobi for the old \
                        dual MOBI7+KF8 behavior."
                    );
                }
                let effective_kf8_only = if legacy_mobi {
                    eprintln!(
                        "Building dual-format MOBI7+KF8 (.mobi). Modern Kindles \
                        prefer KF8-only .azw3. Drop --legacy-mobi to use the \
                        modern default."
                    );
                    false
                } else {
                    true
                };
                let output_path = match output {
                    Some(p) => p,
                    None => {
                        let ext = if effective_kf8_only { "azw3" } else { "mobi" };
                        input.with_extension(ext)
                    }
                };
                // None means "pdoc" (the MOBI default); unknown values warn
                // and fall back to pdoc.
                let doc_type_value = match doc_type.to_lowercase().as_str() {
                    "ebok" => Some("EBOK".to_string()),
                    "pdoc" => None,
                    other => {
                        eprintln!("Warning: unknown --doc-type '{}', using default 'pdoc'", other);
                        None
                    }
                };
                // --cover accepts either a 1-based page number or a file path;
                // anything non-numeric (or 0) is treated as a path.
                let cover_source = cover.map(|c| {
                    if let Ok(page_num) = c.parse::<usize>() {
                        if page_num >= 1 {
                            comic::CoverSource::PageNumber(page_num)
                        } else {
                            eprintln!("Warning: cover page number must be >= 1, treating as file path");
                            comic::CoverSource::FilePath(PathBuf::from(c))
                        }
                    } else {
                        comic::CoverSource::FilePath(PathBuf::from(c))
                    }
                });
                // NOTE(review): for comics, kindle limits default OFF unless
                // --kindle-limits is passed — the opposite of Build's
                // default-on behavior. Confirm this asymmetry is intentional.
                let effective_kindle_limits = kindle_limits && !no_kindle_limits;
                if no_embed_source {
                    eprintln!("Note: --no-embed-source is now the default for comics and has no effect");
                }
                let effective_embed_source = embed_source;
                // Hidden --no-crop forces crop level 0.
                let effective_crop = if no_crop { 0 } else { crop };
                let options = comic::ComicOptions {
                    rtl,
                    split: !no_split,
                    crop: effective_crop,
                    enhance: !no_enhance,
                    webtoon,
                    panel_view: !no_panel_view,
                    jpeg_quality,
                    max_height,
                    embed_source: effective_embed_source,
                    doc_type: doc_type_value,
                    title_override: title,
                    author_override: author,
                    language,
                    cover: cover_source,
                    rotate_spreads,
                    panel_reading_order,
                    cover_fill,
                    kindle_limits: effective_kindle_limits,
                    kf8_only: effective_kf8_only,
                    self_check: !no_self_check,
                    kindlegen_parity,
                };
                match comic::build_comic_with_options(&input, &output_path, &profile, &options) {
                    Ok(()) => {
                        let format_name = if effective_kf8_only { "AZW3" } else { "MOBI" };
                        eprintln!("Comic {} built successfully: {}", format_name, output_path.display());
                    }
                    Err(e) => {
                        eprintln!("Error: {}", e);
                        process::exit(1);
                    }
                }
            }
            Commands::Validate { input, strict } => {
                do_validate(&input, strict);
            }
            Commands::Repair {
                input,
                output,
                report_json,
                dry_run,
            } => {
                do_repair(&input, output.as_ref(), report_json, dry_run);
            }
            Commands::RewriteMetadata {
                input,
                output,
                report_json,
                dry_run,
                title,
                authors,
                publisher,
                description,
                language,
                isbn,
                asin,
                publication_date,
                subjects,
                series,
                series_index,
                cover,
            } => {
                do_rewrite_metadata(
                    &input,
                    output.as_ref(),
                    report_json,
                    dry_run,
                    title,
                    authors,
                    publisher,
                    description,
                    language,
                    isbn,
                    asin,
                    publication_date,
                    subjects,
                    series,
                    series_index,
                    cover.as_ref(),
                );
            }
            Commands::Dump { input } => {
                do_dump(&input);
            }
        }
    }
}
/// Prints a human-readable dump of a MOBI/AZW file to stdout.
/// Exits the process with status 1 if the file cannot be dumped.
fn do_dump(path: &PathBuf) {
    let dump = mobi_dump::dump_mobi(path).unwrap_or_else(|e| {
        eprintln!("Error: could not dump {}: {}", path.display(), e);
        process::exit(1);
    });
    print!("{}", dump);
}
/// Validates an OPF/EPUB against the bundled Kindle Publishing Guidelines
/// rules and prints every finding plus a summary line.
///
/// Exit codes: 2 = input could not be parsed; 1 = errors found (or warnings
/// found with --strict); otherwise returns normally.
fn do_validate(opf_path: &PathBuf, strict: bool) {
    println!(
        "Validating {} against Kindle Publishing Guidelines v{}",
        opf_path.display(),
        kdp_rules::KPG_VERSION
    );
    let epub = kindling::extracted::ExtractedEpub::from_opf_path(opf_path).unwrap_or_else(|e| {
        eprintln!("Error: could not parse OPF {}: {}", opf_path.display(), e);
        process::exit(2);
    });
    let report = validate::validate(&epub);
    for finding in &report.findings {
        println!("{}", finding);
    }
    let errors = report.error_count();
    let warnings = report.warning_count();
    println!("{} errors, {} warnings, {} info", errors, warnings, report.info_count());
    // Warnings only fail the run in strict mode.
    if errors > 0 || (strict && warnings > 0) {
        process::exit(1);
    }
}
fn do_repair(input: &PathBuf, output: Option<&PathBuf>, report_json: bool, dry_run: bool) {
let default_output;
let output_path: PathBuf = if dry_run {
input.clone()
} else if let Some(p) = output {
p.clone()
} else {
let stem = input
.file_stem()
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_else(|| "repaired".to_string());
let parent = input.parent().unwrap_or(std::path::Path::new("."));
default_output = parent.join(format!("{}-fixed.epub", stem));
default_output
};
let result = if dry_run {
repair::scan_epub(input)
} else {
repair::repair_epub(input, &output_path)
};
let report = match result {
Ok(r) => r,
Err(e) => {
eprintln!("Error: {}", e);
process::exit(1);
}
};
if report_json {
println!("{}", report.to_json());
} else {
let prefix = if dry_run { "(dry-run) " } else { "" };
for fix in &report.fixes_applied {
eprintln!("{}{}", prefix, fix.describe());
}
for warn in &report.warnings {
eprintln!("{}warning: {}: {}", prefix, warn.file, warn.message);
}
if report.any_fixes() {
eprintln!(
"{}Repaired {} issue{} in {}",
prefix,
report.fix_count(),
if report.fix_count() == 1 { "" } else { "s" },
input.display()
);
} else {
eprintln!("{}No repairs needed for {}", prefix, input.display());
}
if !dry_run {
eprintln!("Output written to {}", output_path.display());
}
}
}
/// Rewrites EXTH metadata (and optionally the cover image) in a MOBI file.
///
/// In dry-run mode the rewrite is performed into a throwaway scratch file so
/// the report reflects what *would* change, and nothing user-visible is
/// written. Exits the process with status 1 on any failure.
#[allow(clippy::too_many_arguments)]
fn do_rewrite_metadata(
    input: &PathBuf,
    output: Option<&PathBuf>,
    report_json: bool,
    dry_run: bool,
    title: Option<String>,
    authors: Vec<String>,
    publisher: Option<String>,
    description: Option<String>,
    language: Option<String>,
    isbn: Option<String>,
    asin: Option<String>,
    publication_date: Option<String>,
    subjects: Vec<String>,
    series: Option<String>,
    series_index: Option<String>,
    cover: Option<&PathBuf>,
) {
    // Deferred-init slot so the derived default path can be borrowed below.
    let default_output;
    // Dry-run: output is nominal (input itself). Otherwise use the explicit
    // -o path or derive "<stem>-meta.<ext>" alongside the input.
    let output_path: PathBuf = if dry_run {
        input.clone()
    } else if let Some(p) = output {
        p.clone()
    } else {
        let stem = input
            .file_stem()
            .map(|s| s.to_string_lossy().into_owned())
            .unwrap_or_else(|| "rewritten".to_string());
        let ext = input
            .extension()
            .map(|s| s.to_string_lossy().into_owned())
            .unwrap_or_else(|| "mobi".to_string());
        let parent = input.parent().unwrap_or(std::path::Path::new("."));
        default_output = parent.join(format!("{}-meta.{}", stem, ext));
        default_output
    };
    // Load the replacement cover up front; an unreadable file is fatal.
    let cover_bytes = match cover {
        Some(p) => match std::fs::read(p) {
            Ok(b) => Some(b),
            Err(e) => {
                eprintln!("Error: could not read cover image {}: {}", p.display(), e);
                process::exit(1);
            }
        },
        None => None,
    };
    // Empty repeatable args (--author/--subject) mean "no change", not
    // "clear the field".
    let updates = mobi_rewrite::MetadataUpdates {
        title,
        authors: if authors.is_empty() { None } else { Some(authors) },
        publisher,
        description,
        language,
        isbn,
        asin,
        publication_date,
        subjects: if subjects.is_empty() {
            None
        } else {
            Some(subjects)
        },
        series,
        series_index,
        cover_image: cover_bytes,
    };
    let report_result = if dry_run {
        // Write into a PID-stamped scratch file in the temp dir, then delete
        // it — only the report is wanted.
        let scratch = std::env::temp_dir().join(format!(
            "kindling_rewrite_metadata_dryrun_{}.bin",
            std::process::id()
        ));
        let r = mobi_rewrite::rewrite_mobi_metadata(input, &scratch, &updates);
        let _ = std::fs::remove_file(&scratch);
        r
    } else {
        mobi_rewrite::rewrite_mobi_metadata(input, &output_path, &updates)
    };
    let report = match report_result {
        Ok(r) => r,
        Err(e) => {
            eprintln!("Error: {}", e);
            process::exit(1);
        }
    };
    if report_json {
        println!("{}", rewrite_report_to_json(&report));
    } else {
        let prefix = if dry_run { "(dry-run) " } else { "" };
        if report.no_op {
            eprintln!("{}No metadata changes needed for {}", prefix, input.display());
        } else {
            for change in &report.changes {
                eprintln!("{}{}", prefix, describe_exth_change(change));
            }
            if report.cover_updated {
                eprintln!("{}Replaced cover image record", prefix);
            }
            // Cover replacement counts as one more changed field.
            let n = report.changes.len() + if report.cover_updated { 1 } else { 0 };
            eprintln!(
                "{}Rewrote {} metadata field{} in {}",
                prefix,
                n,
                if n == 1 { "" } else { "s" },
                input.display()
            );
        }
        if !dry_run {
            eprintln!("Output written to {}", output_path.display());
        }
    }
}
fn describe_exth_change(change: &mobi_rewrite::ExthChange) -> String {
match change {
mobi_rewrite::ExthChange::Added { exth_type, value } => {
format!("added EXTH {} ({})", exth_type, preview_bytes(value))
}
mobi_rewrite::ExthChange::Replaced {
exth_type,
old_value,
new_value,
} => format!(
"replaced EXTH {} ({} -> {})",
exth_type,
preview_bytes(old_value),
preview_bytes(new_value)
),
mobi_rewrite::ExthChange::Removed { exth_type, old_value } => {
format!("removed EXTH {} ({})", exth_type, preview_bytes(old_value))
}
}
}
/// Produces a short human-readable preview of an EXTH value.
///
/// Printable UTF-8 (newlines and tabs allowed) is shown as a quoted,
/// possibly-truncated string; anything else is summarized as a byte count.
fn preview_bytes(b: &[u8]) -> String {
    const MAX: usize = 80;
    match std::str::from_utf8(b) {
        Ok(s) if s.chars().all(|c| !c.is_control() || c == '\n' || c == '\t') => {
            if s.len() <= MAX {
                format!("{:?}", s)
            } else {
                // BUGFIX: truncate on a char boundary. The previous
                // `&s[..MAX]` panics when byte MAX falls inside a multi-byte
                // UTF-8 sequence (e.g. a long non-ASCII title).
                let mut end = MAX;
                while !s.is_char_boundary(end) {
                    end -= 1;
                }
                format!("{:?}...", &s[..end])
            }
        }
        _ => format!("{} bytes", b.len()),
    }
}
fn rewrite_report_to_json(report: &mobi_rewrite::RewriteReport) -> String {
let mut out = String::new();
out.push('{');
out.push_str(&format!(
"\"input_path\":{},",
json_string(&report.input_path.display().to_string())
));
out.push_str(&format!(
"\"output_path\":{},",
json_string(&report.output_path.display().to_string())
));
out.push_str(&format!("\"no_op\":{},", report.no_op));
out.push_str(&format!("\"cover_updated\":{},", report.cover_updated));
out.push_str("\"changes\":[");
for (i, change) in report.changes.iter().enumerate() {
if i > 0 {
out.push(',');
}
out.push_str(&exth_change_to_json(change));
}
out.push(']');
out.push('}');
out
}
fn exth_change_to_json(change: &mobi_rewrite::ExthChange) -> String {
match change {
mobi_rewrite::ExthChange::Added { exth_type, value } => format!(
"{{\"kind\":\"added\",\"exth_type\":{},\"value\":{}}}",
exth_type,
json_bytes(value)
),
mobi_rewrite::ExthChange::Replaced {
exth_type,
old_value,
new_value,
} => format!(
"{{\"kind\":\"replaced\",\"exth_type\":{},\"old_value\":{},\"new_value\":{}}}",
exth_type,
json_bytes(old_value),
json_bytes(new_value)
),
mobi_rewrite::ExthChange::Removed {
exth_type,
old_value,
} => format!(
"{{\"kind\":\"removed\",\"exth_type\":{},\"old_value\":{}}}",
exth_type,
json_bytes(old_value)
),
}
}
/// Encodes an EXTH value for JSON output: valid UTF-8 becomes a JSON string
/// (via `json_string`), arbitrary bytes become `{"hex":"<lowercase hex>"}`.
fn json_bytes(b: &[u8]) -> String {
    if let Ok(text) = std::str::from_utf8(b) {
        return json_string(text);
    }
    let hex: String = b.iter().map(|byte| format!("{:02x}", byte)).collect();
    format!("{{\"hex\":\"{}\"}}", hex)
}
/// Escapes `s` as a JSON string literal, including the surrounding quotes.
/// Handles the two mandatory escapes (`"` and `\`), the common whitespace
/// controls, and falls back to `\uXXXX` for remaining control characters.
fn json_string(s: &str) -> String {
    let mut encoded = String::with_capacity(s.len() + 2);
    encoded.push('"');
    for ch in s.chars() {
        match ch {
            '"' => encoded.push_str("\\\""),
            '\\' => encoded.push_str("\\\\"),
            '\n' => encoded.push_str("\\n"),
            '\r' => encoded.push_str("\\r"),
            '\t' => encoded.push_str("\\t"),
            control if (control as u32) < 0x20 => {
                encoded.push_str(&format!("\\u{:04x}", control as u32))
            }
            other => encoded.push(other),
        }
    }
    encoded.push('"');
    encoded
}