1use std::num::NonZeroU32;
2use std::sync::atomic::{AtomicU32, Ordering};
3
/// The first revision; generations start at 1 so they fit in `NonZeroU32`.
const START: u32 = 1;

/// A monotonically increasing revision counter.
///
/// Backed by `NonZeroU32`, so `Option<Revision>` is the same size as
/// `Revision` (the 0 value serves as the niche).
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Revision {
    generation: NonZeroU32,
}

impl Revision {
    /// The first revision (generation 1).
    pub(crate) fn start() -> Self {
        Self::from(START)
    }

    /// Builds a `Revision` from a raw generation value.
    ///
    /// # Panics
    /// Panics if `g` is 0 — zero is reserved as the `NonZeroU32` niche.
    pub(crate) fn from(g: u32) -> Self {
        Self { generation: NonZeroU32::new(g).expect("revision generation must be non-zero") }
    }

    /// The revision immediately following this one.
    ///
    /// # Panics
    /// Panics on `u32` overflow. `checked_add` makes the overflow explicit
    /// in release builds too; the previous `get() + 1` would wrap to 0 in
    /// release mode and only fail via the non-zero check with an
    /// unrelated message.
    pub(crate) fn next(self) -> Revision {
        Self { generation: self.generation.checked_add(1).expect("revision overflow") }
    }

    /// Raw generation value, for storage in `AtomicRevision`.
    fn as_u32(self) -> u32 {
        self.generation.get()
    }
}
35
36impl std::fmt::Debug for Revision {
37 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
38 write!(fmt, "R{}", self.generation)
39 }
40}
41
/// Thread-safe cell holding the current revision.
///
/// Stores the raw `u32` generation so it can live in an `AtomicU32`;
/// values are converted to/from `Revision` at the method boundary.
#[derive(Debug)]
pub(crate) struct AtomicRevision {
    data: AtomicU32,
}
46
47impl AtomicRevision {
48 pub(crate) const fn start() -> Self {
49 Self { data: AtomicU32::new(START) }
50 }
51
52 pub(crate) fn load(&self) -> Revision {
53 Revision::from(self.data.load(Ordering::SeqCst))
54 }
55
56 pub(crate) fn store(&self, r: Revision) {
57 self.data.store(r.as_u32(), Ordering::SeqCst);
58 }
59
60 pub(crate) fn fetch_then_increment(&self) -> Revision {
62 let v = self.data.fetch_add(1, Ordering::SeqCst);
63 assert!(v != u32::MAX, "revision overflow");
64 Revision::from(v)
65 }
66}