use std::{fs, io};
use std::fs::File;
use std::io::{BufReader, BufWriter, Seek, SeekFrom, Write};
use std::path::{Path, PathBuf};
use bytes::Bytes;
use log::error;
use rpki::uri;
use rpki::crypto::DigestAlgorithm;
use rpki::repository::cert::{Cert, ResourceCert};
use rpki::repository::manifest::{ManifestContent, ManifestHash};
use rpki::repository::tal::TalUri;
use rpki::repository::x509::{Serial, Time};
use tempfile::NamedTempFile;
use crate::collector;
use crate::config::Config;
use crate::engine::CaCert;
use crate::error::{Failed, Fatal, RunFailed};
use crate::metrics::Metrics;
use crate::utils::fatal;
use crate::utils::binio::{Compose, Parse, ParseError};
use crate::utils::dump::DumpRegistry;
use crate::utils::json::JsonBuilder;
use crate::utils::uri::UriExt;
/// A disk-based store for previously validated RPKI publication data.
///
/// All data lives below a single base directory chosen at creation.
#[derive(Clone, Debug)]
pub struct Store {
    /// The base directory of the store.
    path: PathBuf,
}
impl Store {
    /// The file name of the store status file below the base directory.
    const STATUS_NAME: &'static str = "status.bin";

    /// The sub-directory for trust anchor certificates from rsync URIs.
    const RSYNC_TA_PATH: &'static str = "ta/rsync";

    /// The sub-directory for trust anchor certificates from HTTPS URIs.
    const HTTPS_TA_PATH: &'static str = "ta/https";

    /// The sub-directory for publication points from RRDP repositories.
    const RRDP_BASE: &'static str = "rrdp";

    /// The sub-directory for publication points collected via rsync.
    const RSYNC_PATH: &'static str = "rsync";

    /// The sub-directory for temporary files.
    const TMP_BASE: &'static str = "tmp";
}
impl Store {
    /// Creates the store's base directory below the configured cache
    /// directory and returns its path.
    ///
    /// Logs an error message and fails if the directory cannot be
    /// created.
    fn create_base_dir(config: &Config) -> Result<PathBuf, Failed> {
        let path = config.cache_dir.join("stored");
        match fs::create_dir_all(&path) {
            Ok(()) => Ok(path),
            Err(err) => {
                error!(
                    "Failed to create store directory {}: {}",
                    path.display(), err
                );
                Err(Failed)
            }
        }
    }

    /// Initializes the store by ensuring its base directory exists.
    pub fn init(config: &Config) -> Result<(), Failed> {
        Self::create_base_dir(config).map(|_| ())
    }

    /// Creates a new store, creating its base directory if necessary.
    pub fn new(config: &Config) -> Result<Self, Failed> {
        Self::create_base_dir(config).map(|path| Store { path })
    }

    /// Sanitizes the stored data. Currently a no-op.
    pub fn sanitize(&self) -> Result<(), Fatal> {
        Ok(())
    }

    /// Starts a new run using the store.
    pub fn start(&self) -> Run<'_> {
        Run::new(self)
    }

    /// Reads and returns the stored status, if present.
    ///
    /// Returns `Ok(None)` when there is no status file yet; fails if
    /// an existing file cannot be parsed.
    pub fn status(&self) -> Result<Option<StoredStatus>, Failed> {
        let path = self.status_path();
        let mut file = match fatal::open_existing_file(&path)? {
            Some(file) => file,
            None => return Ok(None),
        };
        StoredStatus::read(&mut file).map(Some).map_err(|err| {
            error!("Failed to read store status file {}: {}",
                path.display(), err
            );
            Failed
        })
    }

    /// Returns the path of the store status file.
    fn status_path(&self) -> PathBuf {
        self.path.join(Self::STATUS_NAME)
    }

    /// Returns the path where the trust anchor certificate for `uri`
    /// is stored.
    fn ta_path(&self, uri: &TalUri) -> PathBuf {
        let rel = match *uri {
            TalUri::Rsync(ref uri) => {
                uri.unique_path(Self::RSYNC_TA_PATH, ".cer")
            }
            TalUri::Https(ref uri) => {
                uri.unique_path(Self::HTTPS_TA_PATH, ".cer")
            }
        };
        self.path.join(rel)
    }

    /// Returns the base path for all stored RRDP repositories.
    fn rrdp_repository_base(&self) -> PathBuf {
        self.path.join(Self::RRDP_BASE)
    }

    /// Returns the path for the RRDP repository with the given
    /// notification URI.
    fn rrdp_repository_path(&self, uri: &uri::Https) -> PathBuf {
        self.path.join(uri.unique_path(Self::RRDP_BASE, ""))
    }

    /// Returns the path below which rsync-collected points are stored.
    fn rsync_repository_path(&self) -> PathBuf {
        self.path.join(Self::RSYNC_PATH)
    }

    /// Creates a new named temporary file in the store's tmp directory.
    ///
    /// Creates the tmp directory first if necessary.
    fn tmp_file(&self) -> Result<NamedTempFile, Failed> {
        let tmp_dir = self.path.join(Self::TMP_BASE);
        fatal::create_dir_all(&tmp_dir)?;
        match NamedTempFile::new_in(&tmp_dir) {
            Ok(file) => Ok(file),
            Err(err) => {
                error!(
                    "Fatal: failed to create temporary file in {}: {}",
                    tmp_dir.display(), err
                );
                Err(Failed)
            }
        }
    }
}
impl Store {
pub fn dump(&self, dir: &Path) -> Result<(), Failed> {
self.dump_subdir(Self::RSYNC_TA_PATH, dir)?;
self.dump_subdir(Self::HTTPS_TA_PATH, dir)?;
let dir = dir.join("store");
fatal::remove_dir_all(&dir)?;
let mut repos = DumpRegistry::new(dir);
self.dump_point_tree(&self.rsync_repository_path(), &mut repos)?;
self.dump_point_tree(&self.rrdp_repository_base(), &mut repos)?;
self.dump_repository_json(repos)?;
Ok(())
}
fn dump_subdir(
&self,
subdir: &str,
target_base: &Path,
) -> Result<(), Failed> {
let source = self.path.join(subdir);
let target = target_base.join(subdir);
fatal::remove_dir_all(&target)?;
fatal::copy_existing_dir_all(&source, &target)?;
Ok(())
}
fn dump_point_tree(
&self,
path: &Path,
repos: &mut DumpRegistry,
) -> Result<(), Failed> {
let dir = match fatal::read_existing_dir(path)? {
Some(dir) => dir,
None => return Ok(())
};
for entry in dir {
let entry = entry?;
if entry.is_dir() {
self.dump_point_tree(entry.path(), repos)?;
}
else if entry.is_file() {
self.dump_point(entry.path(), repos)?;
}
}
Ok(())
}
fn dump_point(
&self,
path: &Path,
repos: &mut DumpRegistry,
) -> Result<(), Failed> {
let mut file = File::open(path).map_err(|err| {
error!(
"Fatal: failed to open file {}: {}",
path.display(), err
);
Failed
})?;
let header = match StoredPointHeader::read(&mut file) {
Ok(some) => some,
Err(err) => {
error!(
"Skipping {}: failed to read file: {}",
path.display(), err
);
return Ok(())
}
};
let manifest = StoredManifest::read(&mut file).map_err(|err| {
error!(
"Fatal: failed to read file {}: {}",
path.display(), err
);
Failed
})?;
let repo_dir = repos.get_repo_path(header.rpki_notify.as_ref());
self.dump_object(
&repo_dir, &header.manifest_uri, &manifest.manifest
)?;
self.dump_object(&repo_dir, &manifest.crl_uri, &manifest.crl)?;
while let Some(object) = StoredObject::read(&mut file).map_err(|err| {
error!(
"Fatal: failed to read file {}: {}",
path.display(), err
);
Failed
})? {
self.dump_object(&repo_dir, &object.uri, &object.content)?;
}
Ok(())
}
fn dump_object(
&self,
dir: &Path,
uri: &uri::Rsync,
content: &[u8]
) -> Result<(), Failed> {
let path = dir.join(
format!("{}/{}/{}",
uri.canonical_authority(),
uri.module_name(),
uri.path()
)
);
if let Some(dir) = path.parent() {
fatal::create_dir_all(dir)?;
}
let mut target = match File::create(&path) {
Ok(some) => some,
Err(err) => {
error!(
"Fatal: cannot create target file {}: {}",
path.display(), err
);
return Err(Failed)
}
};
if let Err(err) = target.write_all(content) {
error!(
"Fatal: failed to write to target file {}: {}",
path.display(), err
);
return Err(Failed)
}
Ok(())
}
fn dump_repository_json(
&self,
repos: DumpRegistry,
) -> Result<(), Failed> {
fatal::create_dir_all(repos.base_dir())?;
let path = repos.base_dir().join("repositories.json");
fatal::write_file(
&path,
JsonBuilder::build(|builder| {
builder.member_array("repositories", |builder| {
for (key, value) in repos.rrdp_uris() {
builder.array_object(|builder| {
builder.member_str(
"path", value
);
builder.member_str("type", "rrdp");
builder.member_str(
"rpkiNotify",
key
);
})
}
builder.array_object(|builder| {
builder.member_str("path", "rsync");
builder.member_str("type", "rsync");
});
})
}).as_bytes()
)
}
}
/// A single validation run on the store.
#[derive(Debug)]
pub struct Run<'a> {
    /// The store the run operates on.
    store: &'a Store,
    /// The time the run was started; used by cleanup to decide which
    /// failed points are recent enough to keep.
    started: Time,
}
impl<'a> Run<'a> {
    /// Creates a new run on `store`, recording the start time.
    fn new(
        store: &'a Store,
    ) -> Self {
        Run { store, started: Time::now() }
    }

    /// Finishes the run, writing the store status file.
    ///
    /// The metrics argument is currently unused. Failure to create
    /// the status file is ignored; failure to write it is logged.
    pub fn done(self, metrics: &mut Metrics) {
        let _ = metrics;
        let path = self.store.status_path();
        let mut file = match fatal::create_file(&path) {
            Ok(file) => file,
            Err(_) => return,
        };
        if let Err(err) = StoredStatus::new(Time::now()).write(&mut file) {
            error!(
                "Failed to write store status file {}: {}",
                path.display(), err
            );
        }
    }

    /// Loads the stored trust anchor certificate for `uri`, if any.
    pub fn load_ta(&self, uri: &TalUri) -> Result<Option<Bytes>, Failed> {
        let content = fatal::read_existing_file(&self.store.ta_path(uri))?;
        Ok(content.map(Into::into))
    }

    /// Stores `content` as the trust anchor certificate for `uri`.
    pub fn update_ta(
        &self, uri: &TalUri, content: &[u8]
    ) -> Result<(), Failed> {
        let path = self.store.ta_path(uri);
        if let Some(dir) = path.parent() {
            fatal::create_dir_all(dir)?;
        }
        fatal::write_file(&path, content)
    }

    /// Accesses the repository holding the publication point of
    /// `ca_cert`, keyed by its RRDP notification URI if present.
    pub fn repository(&self, ca_cert: &CaCert) -> Repository {
        Repository::new(self.store, ca_cert.rpki_notify().cloned())
    }

    /// Accesses the stored publication point of `ca_cert`.
    pub fn pub_point(
        &self, ca_cert: &CaCert
    ) -> Result<StoredPoint, Failed> {
        self.repository(ca_cert).get_point(ca_cert.rpki_manifest())
    }
}
impl Run<'_> {
    /// Cleans up the store after a validation run.
    ///
    /// Removes expired trust anchor certificates, publication points
    /// that should not be retained, and all temporary files. The
    /// repositories of retained points are registered with `collector`
    /// so the collector keeps their cached data as well.
    pub fn cleanup(
        &self,
        collector: &mut collector::Cleanup,
    ) -> Result<(), Failed> {
        self.cleanup_ta()?;
        self.cleanup_points(&self.store.rrdp_repository_base(), collector)?;
        self.cleanup_points(&self.store.rsync_repository_path(), collector)?;
        self.cleanup_tmp()?;
        Ok(())
    }

    /// Cleans up the stored publication points below `base`.
    ///
    /// A point file is kept only if [`StoredPoint::retain`] says so;
    /// its repository (RRDP repository or rsync module) is then also
    /// registered with `retain`. Everything else -- including files
    /// that fail to load -- is deleted by `cleanup_dir_tree`.
    fn cleanup_points(
        &self,
        base: &Path,
        retain: &mut collector::Cleanup,
    ) -> Result<(), Failed> {
        Self::cleanup_dir_tree(base, |path| {
            // Files that cannot be loaded quietly are not kept.
            if let Some(stored) = StoredPoint::load_quietly(path.into()) {
                if stored.retain(self.started) {
                    // Tell the collector which repository to keep.
                    if let Some(uri) = stored.header.rpki_notify.as_ref() {
                        retain.add_rrdp_repository(uri)
                    }
                    else {
                        retain.add_rsync_module(&stored.header.manifest_uri)
                    }
                    return Ok(true)
                }
            }
            Ok(false)
        })
    }

    /// Cleans up the stored trust anchor certificates.
    ///
    /// Keeps only files that decode as certificates and have not yet
    /// expired.
    fn cleanup_ta(&self) -> Result<(), Failed> {
        Self::cleanup_dir_tree(&self.store.path.join("ta"), |path| {
            let content = fatal::read_file(path)?;
            if let Ok(cert) = Cert::decode(Bytes::from(content)) {
                if cert.validity().not_after() > Time::now() {
                    return Ok(true)
                }
            }
            Ok(false)
        })
    }

    /// Removes all temporary files left over in the tmp directory.
    fn cleanup_tmp(&self) -> Result<(), Failed> {
        Self::cleanup_dir_tree(&self.store.path.join("tmp"), |_path| {
            // Keep nothing: anything here is a leftover.
            Ok(false)
        })
    }

    /// Recursively cleans up the directory tree rooted at `base`.
    ///
    /// Deletes every file for which `keep` returns `Ok(false)` and
    /// every sub-directory that holds nothing kept afterwards. A
    /// missing `base` directory is treated as empty.
    fn cleanup_dir_tree(
        base: &Path,
        mut keep: impl FnMut(&Path) -> Result<bool, Failed>
    ) -> Result<(), Failed> {
        // Returns whether anything below `base` was kept. Only the
        // top-level call tolerates a missing directory.
        fn recurse(
            base: &Path,
            top: bool,
            op: &mut impl FnMut(&Path) -> Result<bool, Failed>
        ) -> Result<bool, Failed> {
            let dir = if top {
                match fatal::read_existing_dir(base)? {
                    Some(dir) => dir,
                    None => return Ok(false),
                }
            }
            else {
                fatal::read_dir(base)?
            };
            let mut keep = false;
            for entry in dir {
                let entry = entry?;
                if entry.is_dir() {
                    // Sub-directories with no kept content are removed
                    // entirely.
                    if !recurse(entry.path(), false, op)? {
                        fatal::remove_dir_all(entry.path())?;
                    }
                    else {
                        keep = true;
                    }
                }
                else if entry.is_file() {
                    if !op(entry.path())? {
                        fatal::remove_file(entry.path())?;
                    }
                    else {
                        keep = true;
                    }
                }
            }
            Ok(keep)
        }
        recurse(base, true, &mut keep).map(|_| ())
    }
}
/// Access to the stored publication points of a single repository.
pub struct Repository {
    /// The base path below which the repository's points are stored.
    path: PathBuf,
    /// The RRDP notification URI if this is an RRDP repository; `None`
    /// for the rsync repository.
    rpki_notify: Option<uri::Https>,
}
impl Repository {
    /// Creates repository access within `store`.
    ///
    /// With a notification URI, the repository lives below the store's
    /// RRDP base; otherwise below the shared rsync base.
    fn new(store: &Store, rpki_notify: Option<uri::Https>) -> Self {
        let path = match rpki_notify.as_ref() {
            Some(rpki_notify) => store.rrdp_repository_path(rpki_notify),
            None => store.rsync_repository_path(),
        };
        Self { path, rpki_notify }
    }

    /// Returns the repository's RRDP notification URI, if any.
    pub fn rpki_notify(&self) -> Option<&uri::Https> {
        self.rpki_notify.as_ref()
    }

    /// Returns whether this is an RRDP repository.
    pub fn is_rrdp(&self) -> bool {
        self.rpki_notify.is_some()
    }

    /// Opens the stored publication point for the given manifest URI.
    pub fn get_point(
        &self, manifest_uri: &uri::Rsync
    ) -> Result<StoredPoint, Failed> {
        let path = self.point_path(manifest_uri);
        StoredPoint::open(path, manifest_uri, self.rpki_notify.as_ref())
    }

    /// Returns the file path for the point with the given manifest URI.
    fn point_path(&self, manifest_uri: &uri::Rsync) -> PathBuf {
        let rel = format!(
            "rsync/{}/{}/{}",
            manifest_uri.canonical_authority(),
            manifest_uri.module_name(),
            manifest_uri.path(),
        );
        self.path.join(rel)
    }
}
/// A single stored publication point.
pub struct StoredPoint {
    /// The path of the point's file on disk.
    path: PathBuf,
    /// Whether the point was newly created (or just rejected) rather
    /// than loaded with existing content.
    is_new: bool,
    /// The header read from or written to the point's file.
    header: StoredPointHeader,
    /// The stored manifest, present only if the last update succeeded.
    manifest: Option<StoredManifest>,
    /// The open file, positioned at the start of the stored objects,
    /// if available. Iterating the point reads objects from here.
    file: Option<BufReader<File>>,
}
impl StoredPoint {
    /// Opens the stored publication point at `path`.
    ///
    /// If the file is missing, or its header cannot be read with a
    /// non-fatal parse error, a fresh point is created via `create`
    /// instead. If the point's last update failed, the attempt time in
    /// the header is refreshed and the file rewritten to contain just
    /// that header.
    fn open(
        path: PathBuf,
        manifest_uri: &uri::Rsync,
        rpki_notify: Option<&uri::Https>,
    ) -> Result<Self, Failed> {
        let mut file = match File::open(&path) {
            Ok(file) => BufReader::new(file),
            Err(ref err) if err.kind() == io::ErrorKind::NotFound => {
                // No stored data yet: start a fresh point.
                return Self::create(path, manifest_uri, rpki_notify);
            }
            Err(err) => {
                error!(
                    "Failed to open stored publication point at {}: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
        };
        let mut header = match StoredPointHeader::read(&mut file) {
            Ok(header) => header,
            Err(err) if !err.is_fatal() => {
                // Unparseable (e.g., unexpected version) but non-fatal:
                // silently recreate the point from scratch.
                return Self::create(path, manifest_uri, rpki_notify);
            }
            Err(err) => {
                error!(
                    "Failed to read stored publication point at {}: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
        };
        if matches!(header.update_status, UpdateStatus::LastAttempt(_)) {
            // The last update failed, so the file holds only the
            // header. Refresh the attempt time and write the header
            // back; manifest and file handle stay `None`.
            header.update_status = UpdateStatus::LastAttempt(Time::now());
            drop(file);
            // NOTE(review): `File::create` truncates and positions at
            // the start, so the explicit seek below is a no-op --
            // confirm whether a non-truncating `OpenOptions` open was
            // intended instead.
            let mut file = File::create(&path).map_err(|err| {
                error!(
                    "Failed to update stored publication point at {}: \
                    re-open: {}",
                    path.display(), err
                );
                Failed
            })?;
            if let Err(err) = file.seek(SeekFrom::Start(0)) {
                error!(
                    "Failed to update stored publication point at {}: \
                    seek failed: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
            if let Err(err) = header.write(&mut file) {
                error!(
                    "Failed to update stored publication point at {}: \
                    write failed: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
            return Ok(Self {
                path,
                is_new: false,
                header,
                manifest: None,
                file: None,
            })
        }
        // The last update succeeded: the manifest follows the header.
        let manifest = match StoredManifest::read(&mut file) {
            Ok(manifest) => manifest,
            Err(err) => {
                error!(
                    "Failed to read stored publication point at {}: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
        };
        // Keep the file handle: it is now positioned at the first
        // stored object, ready for iteration.
        Ok(Self {
            path,
            is_new: false,
            header,
            manifest: Some(manifest),
            file: Some(file)
        })
    }

    /// Creates a fresh stored point file at `path`.
    ///
    /// Writes only a header whose status is `LastAttempt(now)`; the
    /// point has no manifest or objects yet.
    fn create(
        path: PathBuf,
        manifest_uri: &uri::Rsync,
        rpki_notify: Option<&uri::Https>,
    ) -> Result<Self, Failed> {
        if let Some(path) = path.parent() {
            fatal::create_dir_all(path)?;
        }
        let mut file = match File::create(&path) {
            Ok(file) => file,
            Err(err) => {
                error!(
                    "Failed to create stored publication point at {}: {}",
                    path.display(), err
                );
                return Err(Failed)
            }
        };
        let header = StoredPointHeader::new(
            manifest_uri.clone(), rpki_notify.cloned(),
        );
        if let Err(err) = header.write(&mut file) {
            error!(
                "Failed to write stored publication point at {}: {}",
                path.display(), err
            );
            return Err(Failed)
        }
        Ok(StoredPoint {
            path,
            is_new: true,
            header,
            manifest: None,
            file: None,
        })
    }

    /// Loads the stored point at `path` without logging any errors.
    ///
    /// Returns `None` if the file cannot be opened or parsed. Used by
    /// cleanup, which simply drops unparseable files.
    pub fn load_quietly(path: PathBuf) -> Option<Self> {
        let mut file = BufReader::new(File::open(&path).ok()?);
        let header = StoredPointHeader::read(&mut file).ok()?;
        // Only a successfully updated point contains a manifest; a
        // failed point stores the header alone.
        let manifest = match header.update_status {
            UpdateStatus::Success(_) => {
                Some(StoredManifest::read(&mut file).ok()?)
            }
            UpdateStatus::LastAttempt(_) => None,
        };
        Some(Self {
            path,
            is_new: false,
            header, manifest,
            file: Some(file)
        })
    }

    /// Returns whether the point was newly created rather than loaded.
    pub fn is_new(&self) -> bool {
        self.is_new
    }

    /// Replaces the content of the stored point.
    ///
    /// Writes the updated header, `manifest`, and all objects produced
    /// by the `objects` closure to a temporary file in the store and
    /// then moves it over the point's file. On success, the point's
    /// file handle is positioned at the first object for iteration.
    pub fn update(
        &mut self,
        store: &Store,
        manifest: StoredManifest,
        objects: impl FnMut() -> Result<Option<StoredObject>, UpdateError>
    ) -> Result<(), UpdateError> {
        let tmp_file = store.tmp_file()?;
        self._update(tmp_file, manifest, objects)
    }

    /// Performs the actual update using the given temporary file.
    ///
    /// Split out from `update` so that tests can supply their own
    /// temporary file.
    fn _update(
        &mut self,
        tmp_file: NamedTempFile,
        manifest: StoredManifest,
        mut objects: impl FnMut() -> Result<Option<StoredObject>, UpdateError>
    ) -> Result<(), UpdateError> {
        let mut tmp_file = BufWriter::new(tmp_file);
        // The point will hold a manifest after this, so the update
        // counts as successful.
        self.header.update_status = UpdateStatus::Success(Time::now());
        if let Err(err) = self.header.write(&mut tmp_file) {
            error!(
                "Fatal: failed to write to file {}: {}",
                tmp_file.get_ref().path().display(), err
            );
            return Err(UpdateError::fatal())
        }
        if let Err(err) = manifest.write(&mut tmp_file) {
            error!(
                "Fatal: failed to write to file {}: {}",
                tmp_file.get_ref().path().display(), err
            );
            return Err(UpdateError::fatal())
        }
        // Remember where the objects start so the persisted file can
        // be repositioned there afterwards.
        let tmp_object_start = match tmp_file.stream_position() {
            Ok(some) => some,
            Err(err) => {
                error!(
                    "Fatal: failed to get position in file {}: {}",
                    tmp_file.get_ref().path().display(), err
                );
                return Err(UpdateError::fatal())
            }
        };
        // Drain the closure until it signals the end with `Ok(None)`.
        // Its errors propagate via `?`, abandoning the temporary file.
        while let Some(object) = objects()? {
            if let Err(err) = object.write(&mut tmp_file) {
                error!(
                    "Fatal: failed to write to file {}: {}",
                    tmp_file.get_ref().path().display(), err
                );
                return Err(UpdateError::fatal())
            }
        }
        // Unwrap the BufWriter, which flushes any buffered data.
        let tmp_file = tmp_file.into_inner().map_err(|err| {
            let (err, tmp_file) = err.into_parts();
            error!(
                "Fatal: failed to write to file {}: {}",
                tmp_file.get_ref().path().display(), err
            );
            UpdateError::fatal()
        })?;
        // Close our handle on the old file before replacing it.
        drop(self.file.take());
        match tmp_file.persist(&self.path) {
            Ok(file) => self.file = Some(BufReader::new(file)),
            Err(err) => {
                error!(
                    "Failed to persist temporary file {} to {}: {}",
                    err.file.path().display(), self.path.display(),
                    err.error,
                );
                return Err(UpdateError::fatal())
            }
        }
        self.manifest = Some(manifest);
        // Reposition the persisted file at the first object.
        if let Some(file) = self.file.as_mut() {
            if let Err(err) = file.seek(SeekFrom::Start(tmp_object_start)) {
                error!(
                    "Fatal: failed to position file {}: {}",
                    self.path.display(), err
                );
                return Err(UpdateError::fatal())
            }
        }
        Ok(())
    }

    /// Marks the current update attempt as failed.
    ///
    /// Rewrites the point's file to contain only a header with
    /// `LastAttempt(now)`, dropping any stored manifest and objects,
    /// and marks the point as new.
    pub fn reject(&mut self) -> Result<(), Failed> {
        self.is_new = true;
        self.header.update_status = UpdateStatus::LastAttempt(Time::now());
        self.manifest = None;
        self.file = None;
        // `File::create` truncates, discarding the old content.
        let mut file = match File::create(&self.path) {
            Ok(file) => file,
            Err(err) => {
                error!(
                    "Failed to create stored publication point at {}: {}",
                    self.path.display(), err
                );
                return Err(Failed)
            }
        };
        if let Err(err) = self.header.write(&mut file) {
            error!(
                "Failed to write stored publication point at {}: {}",
                self.path.display(), err
            );
            return Err(Failed)
        }
        Ok(())
    }

    /// Returns whether the point should be kept during cleanup.
    ///
    /// Kept if its manifest has not yet expired or, without a
    /// manifest, if the last failed attempt happened at or after
    /// `update_start`, i.e., during the current run.
    fn retain(&self, update_start: Time) -> bool {
        if let Some(manifest) = self.manifest.as_ref() {
            manifest.not_after > Time::now()
        }
        else if let UpdateStatus::LastAttempt(when)
            = self.header.update_status
        {
            when >= update_start
        }
        else {
            false
        }
    }
}
impl StoredPoint {
    /// Returns the path of the point's file on disk.
    pub fn path(&self) -> &Path {
        self.path.as_path()
    }

    /// Returns the stored manifest, if one is currently available.
    pub fn manifest(&self) -> Option<&StoredManifest> {
        self.manifest.as_ref()
    }
}
impl Iterator for StoredPoint {
    type Item = Result<StoredObject, ParseError>;

    /// Yields the stored objects, reading from the open file handle.
    ///
    /// Ends the iteration when there is no open file or when the end
    /// of the file is reached.
    fn next(&mut self) -> Option<Self::Item> {
        let file = self.file.as_mut()?;
        match StoredObject::read(file) {
            Ok(Some(object)) => Some(Ok(object)),
            Ok(None) => None,
            Err(err) => Some(Err(err)),
        }
    }
}
/// The header of a stored publication point file.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct StoredPointHeader {
    /// The rsync URI of the point's manifest.
    manifest_uri: uri::Rsync,
    /// The RRDP notification URI of the point's repository, if any.
    rpki_notify: Option<uri::Https>,
    /// The status of the last update of the point.
    update_status: UpdateStatus,
}
impl StoredPointHeader {
    /// The version of the on-disk format written by `write`.
    const VERSION: u8 = 2;

    /// Creates a new header with the update status set to a failed
    /// attempt at the current time.
    pub fn new(
        manifest_uri: uri::Rsync,
        rpki_notify: Option<uri::Https>,
    ) -> Self {
        Self {
            manifest_uri, rpki_notify,
            update_status: UpdateStatus::LastAttempt(Time::now()),
        }
    }

    /// Reads a header from `reader`.
    ///
    /// Fails with a format error if the leading version octet does not
    /// match [`Self::VERSION`].
    pub fn read(reader: &mut impl io::Read) -> Result<Self, ParseError> {
        let version = u8::parse(reader)?;
        if version != Self::VERSION {
            return Err(ParseError::format(
                format!("unexpected version {version}")
            ))
        }
        let manifest_uri = Parse::parse(reader)?;
        let rpki_notify = Parse::parse(reader)?;
        let update_status = UpdateStatus::read(reader)?;
        Ok(Self { manifest_uri, rpki_notify, update_status })
    }

    /// Writes the header to `writer`, preceded by the version octet.
    ///
    /// The field order matches `read`.
    pub fn write(
        &self, writer: &mut impl io::Write
    ) -> Result<(), io::Error> {
        Self::VERSION.compose(writer)?;
        self.manifest_uri.compose(writer)?;
        self.rpki_notify.compose(writer)?;
        self.update_status.write(writer)
    }
}
/// The status of the last update of a stored publication point.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum UpdateStatus {
    /// The last update succeeded at the given time; the manifest and
    /// objects follow the header in the point's file.
    Success(Time),
    /// The last attempt at the given time left no stored data; the
    /// point's file contains only the header.
    LastAttempt(Time),
}
impl UpdateStatus {
    /// Reads an update status from `reader`.
    ///
    /// The encoding is a type octet (0 for success, 1 for a failed
    /// attempt) followed by the time.
    pub fn read(reader: &mut impl io::Read) -> Result<Self, ParseError> {
        let tag = u8::parse(reader)?;
        match tag {
            0 => Ok(Self::Success(Parse::parse(reader)?)),
            1 => Ok(Self::LastAttempt(Parse::parse(reader)?)),
            _ => Err(ParseError::format(
                "invalid update status".to_string()
            )),
        }
    }

    /// Writes the update status to `writer` in the format expected by
    /// `read`.
    pub fn write(self, writer: &mut impl io::Write) -> Result<(), io::Error> {
        let (tag, time) = match self {
            Self::Success(time) => (0u8, time),
            Self::LastAttempt(time) => (1u8, time),
        };
        tag.compose(writer)?;
        time.compose(writer)
    }
}
/// The manifest of a stored publication point plus associated data.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct StoredManifest {
    /// The expiry time of the manifest's EE certificate.
    pub not_after: Time,
    /// The manifest number.
    pub manifest_number: Serial,
    /// The thisUpdate time of the manifest.
    pub this_update: Time,
    /// The CA repository URI of the issuing CA certificate.
    pub ca_repository: uri::Rsync,
    /// The raw bytes of the manifest.
    pub manifest: Bytes,
    /// The rsync URI of the manifest's CRL.
    pub crl_uri: uri::Rsync,
    /// The raw bytes of the CRL.
    pub crl: Bytes,
}
impl StoredManifest {
    /// Creates a new stored manifest from its constituent parts.
    ///
    /// Takes the expiry time from `ee_cert`, number and thisUpdate
    /// from `manifest`, and the CA repository URI from `ca_cert`.
    pub fn new(
        ee_cert: &ResourceCert,
        manifest: &ManifestContent,
        ca_cert: &CaCert,
        manifest_bytes: Bytes,
        crl_uri: uri::Rsync,
        crl: Bytes,
    ) -> Self {
        Self {
            not_after: ee_cert.validity().not_after(),
            manifest_number: manifest.manifest_number(),
            this_update: manifest.this_update(),
            ca_repository: ca_cert.ca_repository().clone(),
            manifest: manifest_bytes,
            crl_uri,
            crl,
        }
    }

    /// Reads a stored manifest from `reader`.
    ///
    /// The fields are read in declaration order.
    pub fn read(reader: &mut impl io::Read) -> Result<Self, ParseError> {
        let not_after = Parse::parse(reader)?;
        let manifest_number = Parse::parse(reader)?;
        let this_update = Parse::parse(reader)?;
        let ca_repository = Parse::parse(reader)?;
        let manifest = Parse::parse(reader)?;
        let crl_uri = Parse::parse(reader)?;
        let crl = Parse::parse(reader)?;
        Ok(StoredManifest {
            not_after, manifest_number, this_update,
            ca_repository, manifest, crl_uri, crl,
        })
    }

    /// Writes the stored manifest to `writer`.
    ///
    /// The field order matches `read`.
    pub fn write(
        &self, writer: &mut impl io::Write
    ) -> Result<(), io::Error> {
        self.not_after.compose(writer)?;
        self.manifest_number.compose(writer)?;
        self.this_update.compose(writer)?;
        self.ca_repository.compose(writer)?;
        self.manifest.compose(writer)?;
        self.crl_uri.compose(writer)?;
        self.crl.compose(writer)
    }
}
/// A single object of a stored publication point.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StoredObject {
    /// The rsync URI of the object.
    pub uri: uri::Rsync,
    /// The object's SHA-256 manifest hash, if available. Only SHA-256
    /// hashes are supported by the on-disk format.
    pub hash: Option<ManifestHash>,
    /// The raw bytes of the object.
    pub content: Bytes,
}
impl StoredObject {
    /// Creates a new stored object from its components.
    pub fn new(
        uri: uri::Rsync,
        content: Bytes,
        hash: Option<ManifestHash>,
    ) -> Self {
        StoredObject { uri, hash, content }
    }

    /// Reads a stored object from `reader`.
    ///
    /// Returns `Ok(None)` if the reader is exhausted right at the
    /// start, i.e., there are no further objects. Only hash type 1
    /// (SHA-256) and type 0 (no hash) are accepted.
    pub fn read(
        reader: &mut impl io::Read
    ) -> Result<Option<Self>, ParseError> {
        let uri = match uri::Rsync::parse(reader) {
            Ok(uri) => uri,
            Err(err) if err.is_eof() => return Ok(None),
            Err(err) => return Err(err),
        };
        let hash_type = u8::parse(reader)?;
        let hash = match hash_type {
            0 => None,
            1 => {
                let algorithm = DigestAlgorithm::sha256();
                let mut digest = vec![0u8; algorithm.digest_len()];
                reader.read_exact(&mut digest)?;
                Some(ManifestHash::new(digest.into(), algorithm))
            }
            _ => {
                return Err(ParseError::format(
                    format!("unsupported hash type {hash_type}")
                ));
            }
        };
        let content = Bytes::parse(reader)?;
        Ok(Some(StoredObject { uri, hash, content }))
    }

    /// Writes the stored object to `writer`.
    ///
    /// A SHA-256 hash is written as type 1 followed by the digest;
    /// any other (or no) hash is written as type 0, i.e., dropped.
    pub fn write(
        &self, writer: &mut impl io::Write
    ) -> Result<(), io::Error> {
        self.uri.compose(writer)?;
        let sha256 = self.hash.as_ref().filter(|hash| {
            hash.algorithm().is_sha256()
        });
        if let Some(hash) = sha256 {
            1u8.compose(writer)?;
            writer.write_all(hash.as_slice())?;
        }
        else {
            0u8.compose(writer)?;
        }
        self.content.compose(writer)
    }
}
/// The content of the store's status file.
#[derive(Clone, Debug)]
pub struct StoredStatus {
    /// The time the status was last written -- set to the current
    /// time when a run finishes (see `Run::done`).
    pub last_update: Time,
}
impl StoredStatus {
    /// The version of the status file format written by `write`.
    const VERSION: u8 = 0;

    /// Creates a new status with the given last update time.
    pub fn new(last_update: Time) -> Self {
        Self { last_update }
    }

    /// Reads a status value from `reader`.
    ///
    /// Fails with a format error if the leading version octet does
    /// not match [`Self::VERSION`].
    pub fn read(reader: &mut impl io::Read) -> Result<Self, ParseError> {
        let version = u8::parse(reader)?;
        if version == Self::VERSION {
            Ok(Self { last_update: Parse::parse(reader)? })
        }
        else {
            Err(ParseError::format(
                format!("unexpected version {version}")
            ))
        }
    }

    /// Writes the status to `writer`, preceded by the version octet.
    pub fn write(
        &self, writer: &mut impl io::Write
    ) -> Result<(), io::Error> {
        Self::VERSION.compose(writer)?;
        self.last_update.compose(writer)
    }
}
/// An error happened while updating a stored publication point.
#[derive(Clone, Copy, Debug)]
pub enum UpdateError {
    /// The update should be aborted.
    // NOTE(review): produced by the caller-supplied objects closure in
    // `StoredPoint::update` -- confirm intended semantics with callers.
    Abort,
    /// The update failed with the given run failure.
    Failed(RunFailed),
}
impl UpdateError {
    /// Creates an update error signalling a fatal run failure.
    pub fn fatal() -> Self {
        Self::Failed(RunFailed::fatal())
    }
}
impl From<Failed> for UpdateError {
fn from(_: Failed) -> Self {
UpdateError::Failed(RunFailed::fatal())
}
}
impl From<RunFailed> for UpdateError {
fn from(err: RunFailed) -> Self {
UpdateError::Failed(err)
}
}
#[cfg(test)]
mod test {
    use std::str::FromStr;
    use super::*;

    /// Checks that a stored point written via `_update` reads back
    /// identically via `load_quietly` and object iteration.
    #[test]
    fn read_write() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("stored.bin");
        let manifest_uri = uri::Rsync::from_str(
            "rsync://example.com/test/test.mft"
        ).unwrap();
        let rpki_notify = Some(uri::Https::from_str(
            "https://example.com/notification.xml"
        ).unwrap());
        // A manifest with arbitrary but fixed content.
        let manifest = StoredManifest {
            not_after: Time::utc(2025, 10, 1, 16, 10, 22),
            manifest_number: Serial::default(),
            this_update: Time::utc(2010, 6, 3, 8, 11, 0),
            ca_repository: uri::Rsync::from_str(
                "rsync://example.com/test/"
            ).unwrap(),
            manifest: Bytes::from_static(b"deadbeef"),
            crl_uri: uri::Rsync::from_str(
                "rsync://example.com/test/test.crl"
            ).unwrap(),
            crl: Bytes::from_static(b"crlbytesgohere"),
        };
        // Two objects: one with a SHA-256 hash, one without.
        let objects = [
            StoredObject::new(
                uri::Rsync::from_str(
                    "rsync://example.com/test/obj1.bin"
                ).unwrap(),
                Bytes::from_static(b"object1content"),
                Some(ManifestHash::new(
                    Bytes::copy_from_slice(
                        DigestAlgorithm::sha256().digest(
                            b"object1content"
                        ).as_ref()
                    ),
                    DigestAlgorithm::sha256()
                ))
            ),
            StoredObject::new(
                uri::Rsync::from_str(
                    "rsync://example.com/test/obj2.bin"
                ).unwrap(),
                Bytes::from_static(b"object2stuff"),
                None,
            ),
        ];
        // Opening a missing path creates a fresh point which is then
        // filled via `_update`, supplying the temporary file directly.
        let mut point = StoredPoint::open(
            path.clone(), &manifest_uri, rpki_notify.as_ref()
        ).unwrap();
        let mut objects_iter = objects.iter();
        point._update(
            NamedTempFile::new_in(&dir).unwrap(),
            manifest.clone(),
            || Ok(objects_iter.next().cloned())
        ).unwrap();
        drop(point);
        // Reading the point back must yield the same manifest and the
        // objects in their original order.
        let mut point = StoredPoint::load_quietly(path.clone()).unwrap();
        assert_eq!(point.manifest, Some(manifest));
        assert_eq!(point.next().unwrap().unwrap(), objects[0]);
        assert_eq!(point.next().unwrap().unwrap(), objects[1]);
        assert!(point.next().is_none());
    }
}