use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize};
use tls_codec::{TlsDeserialize, TlsSerialize, TlsSize};
/// Default validity span of a key package: 3 × 28 days (~3 months), in seconds.
const DEFAULT_KEY_PACKAGE_LIFETIME_SECONDS: u64 = 60 * 60 * 24 * 28 * 3;
/// Amount (1 hour, in seconds) by which `not_before` is backdated relative to
/// the creation time — presumably to tolerate clock skew between peers.
const DEFAULT_KEY_PACKAGE_LIFETIME_MARGIN_SECONDS: u64 = 60 * 60;
/// Largest `not_after - not_before` span accepted by
/// `Lifetime::has_acceptable_range`: the default lifetime plus its margin.
const MAX_LEAF_NODE_LIFETIME_RANGE_SECONDS: u64 =
    DEFAULT_KEY_PACKAGE_LIFETIME_MARGIN_SECONDS + DEFAULT_KEY_PACKAGE_LIFETIME_SECONDS;
/// A validity window bounded by two timestamps, each expressed as whole
/// seconds since the Unix epoch (see `Lifetime::new`, which derives both
/// bounds from `SystemTime::now()`).
#[derive(
    PartialEq, Eq, Copy, Clone, Debug, TlsSerialize, TlsSize, TlsDeserialize, Serialize, Deserialize,
)]
pub struct Lifetime {
    // Start of the validity window (seconds since the Unix epoch, exclusive
    // in `is_valid`).
    not_before: u64,
    // End of the validity window (seconds since the Unix epoch, exclusive
    // in `is_valid`).
    not_after: u64,
}
impl Lifetime {
    /// Creates a `Lifetime` valid from slightly before now until `t` seconds
    /// in the future.
    ///
    /// `not_before` is backdated by
    /// `DEFAULT_KEY_PACKAGE_LIFETIME_MARGIN_SECONDS` relative to the current
    /// time. Saturating arithmetic is used so that extreme inputs (a system
    /// clock within the margin of the epoch, or a `t` close to `u64::MAX`)
    /// clamp to the representable range instead of panicking in debug builds
    /// or wrapping in release builds.
    ///
    /// # Panics
    ///
    /// Panics if the system clock reports a time before the Unix epoch.
    pub fn new(t: u64) -> Self {
        let lifetime_margin: u64 = DEFAULT_KEY_PACKAGE_LIFETIME_MARGIN_SECONDS;
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("SystemTime before UNIX EPOCH!")
            .as_secs();
        // Plain `now - lifetime_margin` would underflow for clocks near the
        // epoch, and `now + t` could overflow for very large `t`; saturate
        // both bounds (mirroring `has_acceptable_range`).
        let not_before = now.saturating_sub(lifetime_margin);
        let not_after = now.saturating_add(t);
        Self {
            not_before,
            not_after,
        }
    }

    /// Returns `true` if the current time lies strictly inside the
    /// `(not_before, not_after)` window.
    ///
    /// Returns `false` (after logging an error) if the system clock reports a
    /// time before the Unix epoch, rather than panicking.
    pub(crate) fn is_valid(&self) -> bool {
        match SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .map(|duration| duration.as_secs())
        {
            Ok(elapsed) => self.not_before < elapsed && elapsed < self.not_after,
            Err(_) => {
                log::error!("SystemTime before UNIX EPOCH.");
                false
            }
        }
    }

    /// Returns `true` if the window's span does not exceed
    /// `MAX_LEAF_NODE_LIFETIME_RANGE_SECONDS`.
    ///
    /// An inverted window (`not_after < not_before`) saturates to a span of
    /// zero and is therefore "acceptable" here; such a window can never pass
    /// `is_valid`.
    pub fn has_acceptable_range(&self) -> bool {
        self.not_after.saturating_sub(self.not_before) <= MAX_LEAF_NODE_LIFETIME_RANGE_SECONDS
    }
}
/// The default lifetime starts now (less the standard margin) and lasts
/// `DEFAULT_KEY_PACKAGE_LIFETIME_SECONDS`.
impl Default for Lifetime {
    fn default() -> Self {
        Self::new(DEFAULT_KEY_PACKAGE_LIFETIME_SECONDS)
    }
}
#[cfg(test)]
mod tests {
    use tls_codec::{Deserialize, Serialize};

    use super::Lifetime;

    // Exercises validity of a fresh default lifetime, expiry of a zero-length
    // lifetime, and preservation of (in)validity across a TLS round trip.
    #[test]
    fn lifetime() {
        // A freshly created default lifetime must already be valid: its
        // window opens one margin before "now".
        let ext = Lifetime::default();
        assert!(ext.is_valid());

        // A zero-length lifetime ends at creation time; after one second the
        // current time is strictly past `not_after`, so it must be invalid.
        let ext = Lifetime::new(0);
        std::thread::sleep(std::time::Duration::from_secs(1));
        assert!(!ext.is_valid());

        // Serialize and deserialize the expired lifetime; the round-tripped
        // value must still be invalid (timestamps survive encoding).
        let serialized = ext
            .tls_serialize_detached()
            .expect("error encoding life time extension");
        let ext_deserialized = Lifetime::tls_deserialize(&mut serialized.as_slice())
            .expect("Error deserializing lifetime");
        assert!(!ext_deserialized.is_valid());
    }
}