use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};
3
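/// Value of the first revision. Starting at 1 rather than 0 lets the
/// generation live in a `NonZeroU32`, so `Option<Revision>` costs no
/// extra space.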
const START: u32 = 1;

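/// A monotonically increasing revision number. Revisions derive `Ord`,
/// so two of them can be compared directly to decide which is newer.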
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Revision {
    generation: NonZeroU32,
}

impl Revision {
    pub(crate) fn start() -> Self {
        Self::from(START)
    }
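    /// Constructs a `Revision` from a raw generation number.
    ///
    /// Panics if `g` is zero, since the generation is stored as a
    /// `NonZeroU32`.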
    pub(crate) fn from(g: u32) -> Self {
        Self {
            generation: NonZeroU32::new(g).expect("revision must be non-zero"),
        }
    }
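    /// Returns the revision immediately after this one. Panics if the
    /// 32-bit generation counter would overflow.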
    pub(crate) fn next(self) -> Revision {
        // `checked_add` turns a wrapped generation into a clear panic
        // instead of producing 0, which `from` would reject with a less
        // helpful message.
        let next = self.generation.get().checked_add(1).expect("revision overflow");
        Self::from(next)
    }

    fn as_u32(self) -> u32 {
        self.generation.get()
    }
}

impl std::fmt::Debug for Revision {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(fmt, "R{}", self.generation)
    }
}
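/// A `Revision` that can be shared and updated across threads. The
/// generation is stored as a plain `AtomicU32` (the standard library has
/// no atomic `NonZeroU32`); all accesses use `Ordering::SeqCst`.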
#[derive(Debug)]
pub(crate) struct AtomicRevision {
    data: AtomicU32,
}

impl AtomicRevision {
    pub(crate) fn start() -> Self {
        Self {
            data: AtomicU32::new(START),
        }
    }

    pub(crate) fn load(&self) -> Revision {
        Revision::from(self.data.load(Ordering::SeqCst))
    }

    pub(crate) fn store(&self, r: Revision) {
        self.data.store(r.as_u32(), Ordering::SeqCst);
    }
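    /// Atomically increments the stored revision, returning the value it
    /// held *before* the increment.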
    pub(crate) fn fetch_then_increment(&self) -> Revision {
        let v = self.data.fetch_add(1, Ordering::SeqCst);
        // `fetch_add` wraps on overflow, so this check is best-effort: it
        // turns a wrapped counter into a panic rather than silently
        // reusing revision numbers.
        assert!(v != u32::MAX, "revision overflow");
        Revision::from(v)
    }
}
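
// A minimal sketch of the expected behavior, exercising only the API
// defined above (no external assumptions).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn revision_ordering() {
        let r1 = Revision::start();
        let r2 = r1.next();
        // Revisions start at generation 1 and order by generation.
        assert!(r1 < r2);
        assert_eq!(format!("{:?}", r1), "R1");
        assert_eq!(format!("{:?}", r2), "R2");
    }

    #[test]
    fn atomic_revision_round_trip() {
        let current = AtomicRevision::start();
        assert_eq!(current.load(), Revision::start());

        // `fetch_then_increment` returns the old revision and bumps the
        // stored one.
        let old = current.fetch_then_increment();
        assert_eq!(old, Revision::start());
        assert_eq!(current.load(), Revision::start().next());

        current.store(Revision::from(10));
        assert_eq!(current.load(), Revision::from(10));
    }
}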