use std::{
    cell::UnsafeCell,
    fmt::Debug,
    sync::atomic::{AtomicU64, AtomicUsize, Ordering},
};

use bitflags::bitflags;
use serde::{Deserialize, Serialize};

use crate::eviction::Eviction;

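/// Hint attached to a cache entry; the eviction algorithm may use it to prioritize entries.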
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum CacheHint {
    Normal,
    Low,
}

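// Status flags of a record, packed into a single atomic `u64` (see `Record::flags`).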
bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct Flags: u64 {
        const IN_INDEXER = 0b00000001;
        const IN_EVICTION = 0b00000010;
        const EPHEMERAL = 0b00000100;
    }
}

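/// The payload of a cache record: key, value, eviction hint, hash, and weight.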
pub struct Data<E>
where
    E: Eviction,
{
    pub key: E::Key,
    pub value: E::Value,
    pub hint: E::Hint,
    pub hash: u64,
    pub weight: usize,
}

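/// A cache record that bundles [`Data`] with the per-entry eviction state, an atomic
/// reference count, and atomic status flags.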
pub struct Record<E>
where
    E: Eviction,
{
    data: Data<E>,
    state: UnsafeCell<E::State>,
    refs: AtomicUsize,
    flags: AtomicU64,
}

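// Safety: `UnsafeCell` makes `Record` `!Sync` by default; these impls assume that access to the
// eviction `state` is synchronized externally.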
unsafe impl<E> Send for Record<E> where E: Eviction {}
unsafe impl<E> Sync for Record<E> where E: Eviction {}

impl<E> Debug for Record<E>
where
    E: Eviction,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Record").field("hash", &self.data.hash).finish()
    }
}

impl<E> Record<E>
where
    E: Eviction,
{
    pub const STATE_OFFSET: usize = std::mem::offset_of!(Self, state);

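    /// Create a record from the given data, with the default eviction state, zero references,
    /// and no flags set.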
    pub fn new(data: Data<E>) -> Self {
        Record {
            data,
            state: Default::default(),
            refs: AtomicUsize::new(0),
            flags: AtomicU64::new(0),
        }
    }

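    /// Get a reference to the record key.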
    pub fn key(&self) -> &E::Key {
        &self.data.key
    }

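    /// Get a reference to the record value.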
    pub fn value(&self) -> &E::Value {
        &self.data.value
    }

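    /// Get a reference to the cache hint of the record.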
    pub fn hint(&self) -> &E::Hint {
        &self.data.hint
    }

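    /// Get the hash of the record.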
    pub fn hash(&self) -> u64 {
        self.data.hash
    }

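    /// Get the weight of the record.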
    pub fn weight(&self) -> usize {
        self.data.weight
    }

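    /// Get the [`UnsafeCell`] that wraps the eviction state of the record.
    ///
    /// Access to the inner state must be synchronized by the caller.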
    pub fn state(&self) -> &UnsafeCell<E::State> {
        &self.state
    }

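    /// Set or clear [`Flags::IN_EVICTION`] for the record.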
    pub fn set_in_eviction(&self, val: bool) {
        self.set_flags(Flags::IN_EVICTION, val, Ordering::Release);
    }

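    /// Check whether [`Flags::IN_EVICTION`] is set for the record.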
    pub fn is_in_eviction(&self) -> bool {
        self.get_flags(Flags::IN_EVICTION, Ordering::Acquire)
    }

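    /// Set or clear [`Flags::IN_INDEXER`] for the record.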
    pub fn set_in_indexer(&self, val: bool) {
        self.set_flags(Flags::IN_INDEXER, val, Ordering::Release);
    }

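    /// Check whether [`Flags::IN_INDEXER`] is set for the record.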
    pub fn is_in_indexer(&self) -> bool {
        self.get_flags(Flags::IN_INDEXER, Ordering::Acquire)
    }

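    /// Set or clear [`Flags::EPHEMERAL`] for the record.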
    pub fn set_ephemeral(&self, val: bool) {
        self.set_flags(Flags::EPHEMERAL, val, Ordering::Release);
    }

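    /// Check whether [`Flags::EPHEMERAL`] is set for the record.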
    pub fn is_ephemeral(&self) -> bool {
        self.get_flags(Flags::EPHEMERAL, Ordering::Acquire)
    }

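    /// Set or clear the given flags atomically with the given memory ordering.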
    pub fn set_flags(&self, flags: Flags, val: bool, order: Ordering) {
        match val {
            true => self.flags.fetch_or(flags.bits(), order),
            false => self.flags.fetch_and(!flags.bits(), order),
        };
    }

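    /// Check whether all of the given flags are set, loading with the given memory ordering.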
    pub fn get_flags(&self, flags: Flags, order: Ordering) -> bool {
        self.flags.load(order) & flags.bits() == flags.bits()
    }

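    /// Get the current reference count of the record.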
    pub fn refs(&self) -> usize {
        self.refs.load(Ordering::Acquire)
    }

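    /// Increase the reference count by `val` and return the new count.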
    pub fn inc_refs(&self, val: usize) -> usize {
        let old = self.refs.fetch_add(val, Ordering::SeqCst);
        tracing::trace!(
            "[record]: inc record (hash: {}) refs: {} => {}",
            self.hash(),
            old,
            old + val
        );
        old + val
    }

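    /// Decrease the reference count by `val` and return the new count.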
    pub fn dec_refs(&self, val: usize) -> usize {
        let old = self.refs.fetch_sub(val, Ordering::SeqCst);
        tracing::trace!(
            "[record]: dec record (hash: {}) refs: {} => {}",
            self.hash(),
            old,
            old - val
        );
        old - val
    }
}