use std::io::{self, Write};
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::planner::{Layout, PyramidPlan, TileCoord};
/// Version stamp written into every checkpoint; `JobCheckpoint::load` rejects
/// files declaring any other value. Bump when the JSON layout changes incompatibly.
pub const SCHEMA_VERSION: &str = "1";
/// Well-known (dot-prefixed, hidden) checkpoint filename placed inside the output directory.
pub const CHECKPOINT_FILENAME: &str = ".libviprs-job.json";
/// How an existing output directory / checkpoint should be treated when a job starts.
///
/// NOTE(review): no consumer of these variants is visible in this file — the
/// descriptions below follow the variant names; confirm against the executor.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum ResumeMode {
/// Ignore any existing checkpoint and regenerate everything (the default).
#[default]
Overwrite,
/// Trust an existing checkpoint and skip work it records as completed.
Resume,
/// Presumably re-checks previously recorded work rather than trusting it blindly.
Verify,
}
/// Caller-facing policy: which [`ResumeMode`] to use and how checkpointing behaves.
/// Built via the constructors and `with_*` builder methods; fields are read
/// through the accessor methods.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResumePolicy {
// Selected resume behavior.
mode: ResumeMode,
// Checkpoint cadence; 0 is the default. The unit and the meaning of 0
// (e.g. "never" vs "consumer default") are decided elsewhere — not visible here.
checkpoint_every: u64,
// Optional override for where the checkpoint lives; None = consumer's default location.
checkpoint_root: Option<PathBuf>,
}
impl Default for ResumePolicy {
fn default() -> Self {
Self {
mode: ResumeMode::Overwrite,
checkpoint_every: 0,
checkpoint_root: None,
}
}
}
impl ResumePolicy {
    /// Internal: a default policy with the given mode substituted in.
    fn with_mode(mode: ResumeMode) -> Self {
        let mut policy = Self::default();
        policy.mode = mode;
        policy
    }

    /// Policy that ignores any existing checkpoint and regenerates everything.
    pub fn overwrite() -> Self {
        Self::with_mode(ResumeMode::Overwrite)
    }

    /// Policy that resumes from an existing checkpoint.
    pub fn resume() -> Self {
        Self::with_mode(ResumeMode::Resume)
    }

    /// Policy that verifies previously recorded work.
    pub fn verify() -> Self {
        Self::with_mode(ResumeMode::Verify)
    }

    /// Builder-style setter for the checkpoint cadence.
    pub fn with_checkpoint_every(mut self, n: u64) -> Self {
        self.checkpoint_every = n;
        self
    }

    /// Builder-style setter for an explicit checkpoint directory.
    pub fn with_checkpoint_root(mut self, path: impl Into<PathBuf>) -> Self {
        self.checkpoint_root = Some(path.into());
        self
    }

    /// Selected resume mode.
    pub fn mode(&self) -> ResumeMode {
        self.mode
    }

    /// Checkpoint cadence as configured (0 is the default).
    pub fn checkpoint_every(&self) -> u64 {
        self.checkpoint_every
    }

    /// Explicit checkpoint directory, if one was set.
    pub fn checkpoint_root(&self) -> Option<&Path> {
        self.checkpoint_root.as_deref()
    }
}
/// Persistent record of a tiling job's progress; serialized as JSON into the
/// checkpoint file by `JobCheckpoint::save` and read back by `load`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[non_exhaustive]
pub struct JobMetadata {
/// Must equal [`SCHEMA_VERSION`]; `JobCheckpoint::load` rejects anything else.
pub schema_version: String,
/// Fingerprint of the plan this job belongs to (see `compute_plan_hash`).
pub plan_hash: String,
/// Tiles already written, serialized through the shadow-struct serde module.
#[serde(with = "tile_coord_vec_serde")]
pub completed_tiles: Vec<TileCoord>,
/// Pyramid levels fully finished; `default` keeps older checkpoints loadable.
#[serde(default)]
pub levels_completed: Vec<u32>,
/// Job start timestamp as an opaque string (format chosen by the caller — not validated here).
#[serde(default)]
pub started_at: String,
/// Timestamp of the most recent checkpoint write (same opaque-string convention).
#[serde(default)]
pub last_checkpoint_at: String,
}
impl JobMetadata {
    /// Fresh metadata for a just-started job: current schema version, the
    /// given plan hash, no completed work, and both timestamps set to
    /// `started_at`.
    pub fn new(plan_hash: String, started_at: String) -> Self {
        let last_checkpoint_at = started_at.clone();
        Self {
            schema_version: String::from(SCHEMA_VERSION),
            plan_hash,
            completed_tiles: vec![],
            levels_completed: vec![],
            started_at,
            last_checkpoint_at,
        }
    }
}
pub(super) mod tile_coord_vec_serde {
    //! Serde bridge for `Vec<TileCoord>`: `TileCoord` lives in the planner
    //! and (from this module's viewpoint) does not derive
    //! Serialize/Deserialize, so it is mirrored through a local
    //! `CoordShadow` with an identical field set.
    use super::TileCoord;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    /// Wire-format twin of `TileCoord` (`{level, col, row}`).
    #[derive(Serialize, Deserialize)]
    pub(super) struct CoordShadow {
        pub(super) level: u32,
        pub(super) col: u32,
        pub(super) row: u32,
    }

    impl From<&TileCoord> for CoordShadow {
        fn from(c: &TileCoord) -> Self {
            Self {
                level: c.level,
                col: c.col,
                row: c.row,
            }
        }
    }

    impl From<CoordShadow> for TileCoord {
        fn from(s: CoordShadow) -> Self {
            TileCoord {
                level: s.level,
                col: s.col,
                row: s.row,
            }
        }
    }

    /// Serialize the slice as a sequence of shadow objects.
    ///
    /// `Serializer::collect_seq` replaces the manual `serialize_seq` +
    /// per-element loop; it still streams without allocating an
    /// intermediate `Vec<CoordShadow>`.
    pub fn serialize<S>(coords: &[TileCoord], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_seq(coords.iter().map(CoordShadow::from))
    }

    /// Deserialize a sequence of shadow objects back into tile coordinates.
    ///
    /// Delegates to `Vec<CoordShadow>`'s own `Deserialize` impl, which the
    /// previous hand-rolled `Visitor` duplicated (same accepted input, same
    /// size-hint preallocation); only the "expecting" text in error messages
    /// becomes serde's generic default.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TileCoord>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let shadows = Vec::<CoordShadow>::deserialize(deserializer)?;
        Ok(shadows.into_iter().map(TileCoord::from).collect())
    }
}
/// Errors surfaced while loading or validating a checkpoint.
#[derive(Debug, Error)]
pub enum ResumeError {
/// The checkpoint was produced by a plan whose hash differs from the current one.
#[error("plan hash mismatch: checkpoint records {expected}, current plan hashes to {actual}")]
PlanHashMismatch {
expected: String,
actual: String,
},
/// The checkpoint file declares a schema version this binary does not speak.
#[error("checkpoint schema mismatch: binary speaks version {expected}, file declares {found}")]
SchemaMismatch {
expected: &'static str,
found: String,
},
/// The checkpoint file exists but is not valid JSON for `JobMetadata`.
#[error("checkpoint at {path} is corrupt: {source}")]
Corrupt {
path: PathBuf,
#[source]
source: serde_json::Error,
},
/// Any other filesystem failure while reading the checkpoint.
#[error("checkpoint I/O error: {0}")]
Io(#[from] io::Error),
}
/// Namespace for loading and atomically saving job checkpoints.
pub struct JobCheckpoint;

impl JobCheckpoint {
    /// Full path of the checkpoint file inside an output directory.
    pub fn checkpoint_path(dir: &Path) -> PathBuf {
        dir.join(CHECKPOINT_FILENAME)
    }

    /// Read and validate the checkpoint stored in `dir`.
    ///
    /// Returns `Ok(None)` when no checkpoint file exists (a fresh job).
    ///
    /// # Errors
    /// - [`ResumeError::Io`] for any read failure other than "not found"
    /// - [`ResumeError::Corrupt`] when the file is not valid `JobMetadata` JSON
    /// - [`ResumeError::SchemaMismatch`] when the file declares a schema
    ///   version other than [`SCHEMA_VERSION`]
    pub fn load(dir: &Path) -> Result<Option<JobMetadata>, ResumeError> {
        let path = Self::checkpoint_path(dir);
        let bytes = match std::fs::read(&path) {
            Ok(b) => b,
            // A missing file is the normal "no previous job" case, not an error.
            Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(None),
            Err(e) => return Err(ResumeError::Io(e)),
        };
        let meta: JobMetadata = serde_json::from_slice(&bytes)
            .map_err(|source| ResumeError::Corrupt { path, source })?;
        if meta.schema_version != SCHEMA_VERSION {
            return Err(ResumeError::SchemaMismatch {
                expected: SCHEMA_VERSION,
                found: meta.schema_version,
            });
        }
        Ok(Some(meta))
    }

    /// Atomically persist `meta` into `dir` (created if needed): write to a
    /// sibling `.tmp` file, fsync it, then rename over the final path so a
    /// concurrent reader never observes a partially written checkpoint.
    ///
    /// Fix over the previous version: on any failure after the temp file is
    /// created (write, sync, or rename), the stray `.tmp` file is now removed
    /// best-effort instead of lingering in the output directory.
    pub fn save(dir: &Path, meta: &JobMetadata) -> io::Result<()> {
        std::fs::create_dir_all(dir)?;
        let final_path = Self::checkpoint_path(dir);
        let tmp_path = tmp_path_for(&final_path);
        let bytes = serde_json::to_vec_pretty(meta)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        let result = (|| {
            let mut f = std::fs::File::create(&tmp_path)?;
            f.write_all(&bytes)?;
            // Flush contents to disk before the rename makes them visible.
            f.sync_all()?;
            std::fs::rename(&tmp_path, &final_path)
        })();
        if result.is_err() {
            // Best-effort cleanup; the original error is what the caller needs.
            let _ = std::fs::remove_file(&tmp_path);
        }
        result
    }
}
/// Derive the temporary sibling path used for atomic writes by appending
/// ".tmp" to the final path's full OS-string representation (extension-safe:
/// "job.json" becomes "job.json.tmp", not "job.tmp").
fn tmp_path_for(final_path: &Path) -> PathBuf {
    let mut os = final_path.as_os_str().to_os_string();
    os.push(".tmp");
    os.into()
}
/// Whether `coord` is recorded as already written in `meta`.
///
/// Linear scan over `completed_tiles`.
/// NOTE(review): for very large jobs a set-based lookup would be worth it —
/// confirm expected tile counts before optimizing.
pub fn is_tile_completed(meta: &JobMetadata, coord: &TileCoord) -> bool {
    // `contains` is the idiomatic form of `iter().any(|c| c == coord)`.
    meta.completed_tiles.contains(coord)
}
/// Stable fingerprint of a `PyramidPlan`, used to detect resuming a job
/// against a different plan than the one that produced the checkpoint.
///
/// The hash is a domain-separated Blake3 digest over the plan's
/// geometry-defining fields, rendered as lowercase hex (64 chars). The exact
/// sequence of `update` calls below IS the format: reordering fields,
/// changing integer widths, or altering the domain string silently changes
/// every plan hash and invalidates existing checkpoints — treat any change
/// here as a schema break (bump the "v1" in the domain tag).
pub fn compute_plan_hash(plan: &PyramidPlan) -> String {
// Domain separation: keeps this digest distinct from any other Blake3 use.
const DOMAIN: &[u8] = b"libviprs/plan/v1";
let mut hasher = blake3::Hasher::new();
hasher.update(DOMAIN);
hasher.update(&plan.image_width.to_le_bytes());
hasher.update(&plan.image_height.to_le_bytes());
hasher.update(&plan.tile_size.to_le_bytes());
hasher.update(&plan.overlap.to_le_bytes());
// Layout is hashed via its stable one-byte tag, not its in-memory repr.
hasher.update(&[layout_tag(plan.layout)]);
hasher.update(&plan.canvas_width.to_le_bytes());
hasher.update(&plan.canvas_height.to_le_bytes());
hasher.update(&[u8::from(plan.centre)]);
hasher.update(&plan.centre_offset_x.to_le_bytes());
hasher.update(&plan.centre_offset_y.to_le_bytes());
// Length prefix prevents per-level fields from aliasing across level counts.
hasher.update(&(plan.levels.len() as u64).to_le_bytes());
for lvl in &plan.levels {
hasher.update(&lvl.level.to_le_bytes());
hasher.update(&lvl.width.to_le_bytes());
hasher.update(&lvl.height.to_le_bytes());
hasher.update(&lvl.cols.to_le_bytes());
hasher.update(&lvl.rows.to_le_bytes());
}
hasher.finalize().to_hex().to_string()
}
fn layout_tag(layout: Layout) -> u8 {
match layout {
Layout::DeepZoom => 1,
Layout::Xyz => 2,
Layout::Google => 3,
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::planner::PyramidPlanner;

    /// Small 128x128 DeepZoom plan shared by several tests.
    fn sample_plan() -> PyramidPlan {
        PyramidPlanner::new(128, 128, 64, 0, Layout::DeepZoom).unwrap().plan()
    }

    /// Metadata with two completed tiles and one completed level.
    fn sample_meta(hash: &str) -> JobMetadata {
        JobMetadata {
            schema_version: SCHEMA_VERSION.to_string(),
            plan_hash: hash.to_string(),
            completed_tiles: vec![TileCoord::new(0, 0, 0), TileCoord::new(1, 1, 0)],
            levels_completed: vec![0],
            started_at: "1970-01-01T00:00:00Z".into(),
            last_checkpoint_at: "1970-01-01T00:00:00Z".into(),
        }
    }

    #[test]
    fn default_mode_is_overwrite() {
        assert_eq!(ResumeMode::default(), ResumeMode::Overwrite);
    }

    #[test]
    fn checkpoint_path_is_well_known_filename() {
        let path = JobCheckpoint::checkpoint_path(Path::new("/tmp/out"));
        assert_eq!(path, PathBuf::from("/tmp/out/.libviprs-job.json"));
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn save_and_load_roundtrip() {
        let out = tempfile::tempdir().unwrap();
        let meta = sample_meta(&compute_plan_hash(&sample_plan()));
        JobCheckpoint::save(out.path(), &meta).unwrap();
        let reloaded = JobCheckpoint::load(out.path()).unwrap().unwrap();
        assert_eq!(reloaded, meta);
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn load_returns_none_when_missing() {
        let out = tempfile::tempdir().unwrap();
        assert!(JobCheckpoint::load(out.path()).unwrap().is_none());
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn load_rejects_corrupt_json() {
        let out = tempfile::tempdir().unwrap();
        let expected_path = JobCheckpoint::checkpoint_path(out.path());
        std::fs::write(&expected_path, b"{not valid json").unwrap();
        match JobCheckpoint::load(out.path()) {
            Err(ResumeError::Corrupt { path: p, .. }) => assert_eq!(p, expected_path),
            other => panic!("expected Corrupt, got {other:?}"),
        }
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn load_rejects_schema_mismatch() {
        let out = tempfile::tempdir().unwrap();
        let file = JobCheckpoint::checkpoint_path(out.path());
        let body = br#"{
"schema_version": "999",
"plan_hash": "deadbeef",
"completed_tiles": [],
"levels_completed": [],
"started_at": "",
"last_checkpoint_at": ""
}"#;
        std::fs::write(&file, body).unwrap();
        match JobCheckpoint::load(out.path()) {
            Err(ResumeError::SchemaMismatch { expected, found }) => {
                assert_eq!(expected, SCHEMA_VERSION);
                assert_eq!(found, "999");
            }
            other => panic!("expected SchemaMismatch, got {other:?}"),
        }
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn save_is_atomic_no_tmp_left_behind() {
        let out = tempfile::tempdir().unwrap();
        let meta = sample_meta(&compute_plan_hash(&sample_plan()));
        JobCheckpoint::save(out.path(), &meta).unwrap();
        let tmp = tmp_path_for(&JobCheckpoint::checkpoint_path(out.path()));
        assert!(!tmp.exists(), "tmp file should be renamed, not linger");
        assert!(JobCheckpoint::checkpoint_path(out.path()).exists());
    }

    #[test]
    fn plan_hash_is_deterministic() {
        let plan = sample_plan();
        assert_eq!(compute_plan_hash(&plan), compute_plan_hash(&plan));
    }

    #[test]
    fn plan_hash_changes_with_tile_size() {
        let big = PyramidPlanner::new(128, 128, 64, 0, Layout::DeepZoom).unwrap().plan();
        let small = PyramidPlanner::new(128, 128, 32, 0, Layout::DeepZoom).unwrap().plan();
        assert_ne!(compute_plan_hash(&big), compute_plan_hash(&small));
    }

    #[test]
    fn plan_hash_changes_with_layout() {
        let dz = PyramidPlanner::new(256, 256, 64, 0, Layout::DeepZoom).unwrap().plan();
        let xyz = PyramidPlanner::new(256, 256, 64, 0, Layout::Xyz).unwrap().plan();
        assert_ne!(compute_plan_hash(&dz), compute_plan_hash(&xyz));
    }

    #[test]
    fn plan_hash_changes_with_overlap() {
        let tight = PyramidPlanner::new(256, 256, 64, 0, Layout::DeepZoom).unwrap().plan();
        let padded = PyramidPlanner::new(256, 256, 64, 1, Layout::DeepZoom).unwrap().plan();
        assert_ne!(compute_plan_hash(&tight), compute_plan_hash(&padded));
    }

    #[test]
    fn plan_hash_is_lowercase_hex() {
        let hash = compute_plan_hash(&sample_plan());
        assert_eq!(hash.len(), 64, "Blake3 produces a 32-byte / 64-hex digest");
        let lower_hex = |c: char| c.is_ascii_hexdigit() && !c.is_ascii_uppercase();
        assert!(hash.chars().all(lower_hex), "hash should be lowercase hex: {hash}");
    }

    #[test]
    fn is_tile_completed_reports_membership() {
        let meta = sample_meta("deadbeef");
        assert!(is_tile_completed(&meta, &TileCoord::new(0, 0, 0)));
        assert!(is_tile_completed(&meta, &TileCoord::new(1, 1, 0)));
        assert!(!is_tile_completed(&meta, &TileCoord::new(2, 0, 0)));
    }
}