// vyre_wgpu/runtime/cache/lru/mod.rs

use rustc_hash::FxHashMap;
2
/// Default slot count for [`IntrusiveLru::new`]: 65,536 entries.
pub const DEFAULT_INTRUSIVE_LRU_CAPACITY: usize = 65_536;
5
/// Fixed-capacity LRU cache whose nodes live in a slab (`nodes`) and are
/// linked by `usize` indices instead of pointers, so no per-entry heap
/// allocation happens after the slab grows.
///
/// Invariant: `indices` maps a key to a slot iff that slot is `active` and
/// linked into the head/tail recency list.
pub struct IntrusiveLru<K, V> {
    // Slab storage; freed slots are recycled via `free`.
    nodes: Vec<Node<K, V>>,
    // key -> slot index in `nodes`, for live entries only.
    indices: FxHashMap<K, usize>,
    // Indices of freed slots available for reuse.
    free: Vec<usize>,
    // Most recently used entry (hot end of the list).
    head: Option<usize>,
    // Least recently used entry; the eviction candidate.
    tail: Option<usize>,
    // Maximum live entries before an insertion evicts the tail.
    capacity: usize,
}
17
/// One slab slot of [`IntrusiveLru`], doubly linked by index.
struct Node<K, V> {
    key: K,
    value: V,
    // Index of the hotter neighbor (toward `head`), if any.
    prev: Option<usize>,
    // Index of the colder neighbor (toward `tail`), if any.
    next: Option<usize>,
    // False once the slot has been freed; guards against stale lookups.
    active: bool,
}
25
26impl<K, V> IntrusiveLru<K, V>
27where
28 K: std::hash::Hash + Eq + Copy,
29 V: Default,
30{
31 #[inline]
33 pub fn new() -> Self {
34 Self::with_capacity(DEFAULT_INTRUSIVE_LRU_CAPACITY)
35 }
36
37 #[inline]
43 pub fn with_capacity(capacity: usize) -> Self {
44 assert!(
45 capacity > 0,
46 "IntrusiveLru capacity must be non-zero. Fix: configure at least one cache slot."
47 );
48 Self {
49 nodes: Vec::with_capacity(capacity.min(1024)),
50 indices: FxHashMap::default(),
51 free: Vec::new(),
52 head: None,
53 tail: None,
54 capacity,
55 }
56 }
57
58 #[inline]
60 pub fn ensure(&mut self, key: K) -> &mut V {
61 if let Some(&index) = self.indices.get(&key) {
62 return &mut self.nodes[index].value;
63 }
64 let index = self.alloc_node(key);
65 &mut self.nodes[index].value
66 }
67
68 #[inline]
70 pub fn touch(&mut self, key: K) {
71 if let Some(&index) = self.indices.get(&key) {
72 self.move_to_front(index);
73 }
74 }
75
76 #[inline]
78 pub fn remove(&mut self, key: &K) {
79 let Some(index) = self.indices.remove(key) else {
80 return;
81 };
82 self.detach(index);
83 let node = &mut self.nodes[index];
84 node.active = false;
85 self.free.push(index);
86 }
87
88 #[inline]
90 pub fn get(&self, key: &K) -> Option<&V> {
91 let &index = self.indices.get(key)?;
92 let node = &self.nodes[index];
93 node.active.then_some(&node.value)
94 }
95
96 #[inline]
98 pub fn hottest(&self, n: usize) -> Vec<K> {
99 self.iter_hottest().map(|(key, _)| *key).take(n).collect()
100 }
101
102 #[inline]
104 pub fn iter_hottest(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
105 let mut current = self.head;
106 std::iter::from_fn(move || {
107 let index = current?;
108 let node = &self.nodes[index];
109 current = node.next;
110 Some((&node.key, &node.value))
111 })
112 }
113
114 #[inline]
116 pub fn iter_coldest(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
117 let mut current = self.tail;
118 std::iter::from_fn(move || {
119 let index = current?;
120 let node = &self.nodes[index];
121 current = node.prev;
122 Some((&node.key, &node.value))
123 })
124 }
125
126 fn alloc_node(&mut self, key: K) -> usize {
127 if self.indices.len() == self.capacity {
128 if let Some(coldest) = self.tail {
129 let evicted_key = self.nodes[coldest].key;
130 self.remove(&evicted_key);
131 }
132 }
133 let index = if let Some(index) = self.free.pop() {
134 self.nodes[index] = Node {
135 key,
136 value: V::default(),
137 prev: None,
138 next: None,
139 active: true,
140 };
141 index
142 } else {
143 self.nodes.push(Node {
144 key,
145 value: V::default(),
146 prev: None,
147 next: None,
148 active: true,
149 });
150 self.nodes.len() - 1
151 };
152 self.indices.insert(key, index);
153 self.attach_front(index);
154 index
155 }
156
157 fn move_to_front(&mut self, index: usize) {
158 if self.head == Some(index) {
159 return;
160 }
161 self.detach(index);
162 self.attach_front(index);
163 }
164
165 fn attach_front(&mut self, index: usize) {
166 self.nodes[index].prev = None;
167 self.nodes[index].next = self.head;
168 if let Some(head) = self.head {
169 self.nodes[head].prev = Some(index);
170 } else {
171 self.tail = Some(index);
172 }
173 self.head = Some(index);
174 }
175
176 fn detach(&mut self, index: usize) {
177 let prev = self.nodes[index].prev;
178 let next = self.nodes[index].next;
179 if let Some(prev) = prev {
180 self.nodes[prev].next = next;
181 } else if self.head == Some(index) {
182 self.head = next;
183 }
184 if let Some(next) = next {
185 self.nodes[next].prev = prev;
186 } else if self.tail == Some(index) {
187 self.tail = prev;
188 }
189 self.nodes[index].prev = None;
190 self.nodes[index].next = None;
191 }
192}
193
194impl<K, V> Default for IntrusiveLru<K, V>
195where
196 K: std::hash::Hash + Eq + Copy,
197 V: Default,
198{
199 fn default() -> Self {
200 Self::new()
201 }
202}
203
/// Per-entry bookkeeping recorded by [`AccessTracker`].
#[derive(Debug, Clone, Copy, Default)]
pub struct AccessMeta {
    /// Number of recorded accesses (saturating, never wraps).
    pub frequency: u32,
    /// Size associated with the entry, as set via `set_size`; 0 until set.
    /// Units are defined by the caller (presumably bytes — confirm at call site).
    pub size: u64,
    /// Logical tick of the most recent access (monotonic counter, not wall time).
    pub last_access: u64,
}
214
/// Tracks access recency, frequency, and size for `u64`-keyed entries on
/// top of an [`IntrusiveLru`].
#[non_exhaustive]
pub struct AccessTracker {
    // LRU ordering plus per-key metadata.
    lru: IntrusiveLru<u64, AccessMeta>,
    // Monotonic logical clock, bumped on every `record`.
    tick: u64,
}
221
222impl AccessTracker {
223 #[inline]
225 pub fn new() -> Self {
226 Self {
227 lru: IntrusiveLru::new(),
228 tick: 0,
229 }
230 }
231
232 #[inline]
234 pub fn record(&mut self, key: u64) {
235 self.tick = self.tick.saturating_add(1);
236 let meta = self.lru.ensure(key);
237 meta.frequency = meta.frequency.saturating_add(1);
238 meta.last_access = self.tick;
239 self.lru.touch(key);
240 }
241
242 #[inline]
244 pub fn hot_set(&self, n: usize) -> Vec<u64> {
245 self.lru.hottest(n)
246 }
247
248 #[inline]
249 pub(crate) fn set_size(&mut self, key: u64, size: u64) {
250 self.lru.ensure(key).size = size;
251 }
252
253 #[inline]
254 pub(crate) fn remove(&mut self, key: u64) {
255 self.lru.remove(&key);
256 }
257
258 #[inline]
259 pub(crate) fn iter_coldest(&self) -> impl Iterator<Item = (&u64, &AccessMeta)> + '_ {
260 self.lru.iter_coldest()
261 }
262
263 #[inline]
264 pub(crate) fn get_meta(&self, key: u64) -> Option<&AccessMeta> {
265 self.lru.get(&key)
266 }
267
268 #[inline]
270 pub fn stats(&self, key: u64) -> Option<crate::runtime::cache::AccessStats> {
271 let meta = self.get_meta(key)?;
272 let recency_rank = self
275 .lru
276 .iter_hottest()
277 .position(|(candidate, _)| *candidate == key)?;
278 Some(crate::runtime::cache::AccessStats {
279 frequency: meta.frequency,
280 recency_rank,
281 size: meta.size,
282 })
283 }
284}
285
286impl Default for AccessTracker {
287 fn default() -> Self {
288 Self::new()
289 }
290}