use std::ops::Add;
use std::ops::AddAssign;
use std::ops::Sub;
use std::ops::SubAssign;
use std::time::Duration;
use web_time::SystemTime;
use web_time::UNIX_EPOCH;
/// A point in time, stored as a raw cycle count from the crate's clock
/// source (`crate::current_cycle()`).
///
/// Comparison, hashing, and ordering operate directly on the cycle count;
/// conversion to wall-clock `Duration`s goes through
/// `crate::nanos_per_cycle()` in the methods below.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct Instant(u64);
impl Instant {
    /// An instant representing a cycle count of zero.
    pub const ZERO: Instant = Instant(0);

    /// Returns the current instant, read from the cycle counter.
    #[inline]
    pub fn now() -> Instant {
        Instant(crate::current_cycle())
    }

    /// Returns the amount of time elapsed from `earlier` to `self`.
    ///
    /// Unlike `std::time::Instant::duration_since`, this saturates to a
    /// zero duration when `earlier` is later than `self` (it is an alias
    /// for [`Instant::saturating_duration_since`]).
    pub fn duration_since(&self, earlier: Instant) -> Duration {
        self.saturating_duration_since(earlier)
    }

    /// Returns the amount of time elapsed from `earlier` to `self`, or
    /// `None` if `earlier` is later than `self`.
    ///
    /// NOTE(review): the cycle delta is converted through `f64`, so deltas
    /// above 2^53 cycles lose precision — fine for typical uptimes, but
    /// worth confirming for very long-running processes.
    pub fn checked_duration_since(&self, earlier: Instant) -> Option<Duration> {
        let cycles = self.0.checked_sub(earlier.0)?;
        Some(Duration::from_nanos(
            (cycles as f64 * crate::nanos_per_cycle()) as u64,
        ))
    }

    /// Returns the amount of time elapsed from `earlier` to `self`,
    /// saturating to a zero duration when `earlier` is later than `self`.
    pub fn saturating_duration_since(&self, earlier: Instant) -> Duration {
        self.checked_duration_since(earlier).unwrap_or_default()
    }

    /// Returns the time elapsed between this instant and [`Instant::now`].
    #[inline]
    pub fn elapsed(&self) -> Duration {
        Instant::now() - *self
    }

    /// Returns `self + duration`, or `None` if the cycle counter would
    /// overflow.
    pub fn checked_add(&self, duration: Duration) -> Option<Instant> {
        // `as_nanos() as u64` truncates durations over ~584 years; such a
        // duration would overflow the u64 cycle counter regardless.
        self.0
            .checked_add((duration.as_nanos() as u64 as f64 / crate::nanos_per_cycle()) as u64)
            .map(Instant)
    }

    /// Returns `self - duration`, or `None` if the cycle counter would
    /// underflow.
    pub fn checked_sub(&self, duration: Duration) -> Option<Instant> {
        self.0
            .checked_sub((duration.as_nanos() as u64 as f64 / crate::nanos_per_cycle()) as u64)
            .map(Instant)
    }

    /// Converts this instant into nanoseconds since the Unix epoch, using
    /// `anchor` as the correlation point between wall-clock time and the
    /// cycle counter.
    pub fn as_unix_nanos(&self, anchor: &Anchor) -> u64 {
        if self.0 > anchor.cycle {
            // Instant is after the anchor: walk forward from the anchor's
            // wall-clock reading.
            let forward_ns = ((self.0 - anchor.cycle) as f64 * crate::nanos_per_cycle()) as u64;
            anchor.unix_time_ns + forward_ns
        } else {
            // Instant is at or before the anchor: walk backward. Saturate
            // instead of underflowing (debug panic / release wrap) when the
            // instant would land before the Unix epoch.
            let backward_ns = ((anchor.cycle - self.0) as f64 * crate::nanos_per_cycle()) as u64;
            anchor.unix_time_ns.saturating_sub(backward_ns)
        }
    }
}
impl Add<Duration> for Instant {
    type Output = Instant;

    /// Advances the instant by `other`, panicking if the underlying cycle
    /// counter would overflow.
    fn add(self, other: Duration) -> Instant {
        match self.checked_add(other) {
            Some(sum) => sum,
            None => panic!("overflow when adding duration to instant"),
        }
    }
}
impl AddAssign<Duration> for Instant {
    /// In-place form of `Add`; panics on overflow.
    fn add_assign(&mut self, other: Duration) {
        let advanced = *self + other;
        *self = advanced;
    }
}
impl Sub<Duration> for Instant {
    type Output = Instant;

    /// Rewinds the instant by `other`, panicking if the underlying cycle
    /// counter would underflow.
    fn sub(self, other: Duration) -> Instant {
        match self.checked_sub(other) {
            Some(diff) => diff,
            None => panic!("overflow when subtracting duration from instant"),
        }
    }
}
impl SubAssign<Duration> for Instant {
    /// In-place form of `Sub<Duration>`; panics on underflow.
    fn sub_assign(&mut self, other: Duration) {
        let rewound = *self - other;
        *self = rewound;
    }
}
impl Sub<Instant> for Instant {
type Output = Duration;
fn sub(self, other: Instant) -> Duration {
self.duration_since(other)
}
}
impl std::fmt::Debug for Instant {
    /// Formats the raw cycle count, forwarding all formatter flags
    /// (width, fill, etc.) to `u64`'s `Debug` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.0, f)
    }
}
/// A correlation point between wall-clock (Unix) time and the cycle
/// counter, captured at a single moment by [`Anchor::new`] so that
/// `Instant`s can later be converted to Unix timestamps via
/// `Instant::as_unix_nanos`.
#[derive(Copy, Clone)]
pub struct Anchor {
    // Wall-clock reading at capture, in nanoseconds since the Unix epoch.
    unix_time_ns: u64,
    // Cycle-counter reading taken at (approximately) the same moment.
    cycle: u64,
}
impl Default for Anchor {
fn default() -> Self {
Self::new()
}
}
impl Anchor {
    /// Captures the current wall-clock time and cycle-counter reading as a
    /// single correlation point.
    ///
    /// # Panics
    ///
    /// Panics if the system clock reports a time before the Unix epoch.
    #[inline]
    pub fn new() -> Anchor {
        let since_epoch = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("unexpected time drift");
        Anchor {
            unix_time_ns: since_epoch.as_nanos() as u64,
            cycle: crate::current_cycle(),
        }
    }
}
#[cfg(all(feature = "atomic", target_has_atomic = "64"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "atomic", target_has_atomic = "64"))))]
mod atomic {
    use std::sync::atomic::AtomicU64;
    use std::sync::atomic::Ordering;

    use super::Instant;

    /// An [`Instant`] that can be read and updated atomically.
    ///
    /// A thin wrapper over [`AtomicU64`] holding the instant's raw cycle
    /// count; every method delegates to the corresponding `AtomicU64`
    /// operation with the caller-supplied memory ordering.
    #[derive(Debug)]
    #[repr(transparent)]
    pub struct Atomic(AtomicU64);

    impl Atomic {
        /// Creates a new atomic instant holding `v`.
        #[inline]
        pub fn new(v: Instant) -> Self {
            Atomic(AtomicU64::new(v.0))
        }

        /// Atomically loads the stored instant.
        #[inline]
        pub fn load(&self, order: Ordering) -> Instant {
            Instant(self.0.load(order))
        }

        /// Atomically stores `val`.
        #[inline]
        pub fn store(&self, val: Instant, order: Ordering) {
            self.0.store(val.0, order)
        }

        /// Atomically replaces the stored instant, returning the previous one.
        #[inline]
        pub fn swap(&self, val: Instant, order: Ordering) -> Instant {
            Instant(self.0.swap(val.0, order))
        }

        /// Atomically stores the later of the current and given instants,
        /// returning the previous value.
        #[inline]
        pub fn fetch_max(&self, val: Instant, order: Ordering) -> Instant {
            Instant(self.0.fetch_max(val.0, order))
        }

        /// Atomically stores the earlier of the current and given instants,
        /// returning the previous value.
        #[inline]
        pub fn fetch_min(&self, val: Instant, order: Ordering) -> Instant {
            Instant(self.0.fetch_min(val.0, order))
        }

        /// Consumes the atomic and returns the contained instant.
        #[inline]
        pub fn into_instant(self) -> Instant {
            Instant(self.0.into_inner())
        }
    }

    impl From<Instant> for Atomic {
        /// Wraps `instant` in a new [`Atomic`].
        #[inline]
        fn from(instant: Instant) -> Self {
            Atomic::new(instant)
        }
    }
}
#[cfg(all(feature = "atomic", target_has_atomic = "64"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "atomic", target_has_atomic = "64"))))]
pub use atomic::Atomic;