//! Intrusive slab-backed LRU cache and access tracking
//! (`vyre_wgpu/runtime/cache/lru/mod.rs`).
1use rustc_hash::FxHashMap;
2
/// Default maximum number of live nodes retained by an [`IntrusiveLru`].
/// Inserting a new key beyond this count evicts the coldest entry.
pub const DEFAULT_INTRUSIVE_LRU_CAPACITY: usize = 65_536;
5
/// Intrusive doubly-linked LRU over a slab allocator.
///
/// Nodes live in a `Vec` slab and link to each other by slab index instead of
/// pointers, so there is no per-node allocation and freed slots are recycled.
///
/// O(1) record, remove, and hottest/coldest iteration.
pub struct IntrusiveLru<K, V> {
    // Slab of nodes; freed slots stay allocated and are reused via `free`.
    nodes: Vec<Node<K, V>>,
    // key -> slab index for every active node.
    indices: FxHashMap<K, usize>,
    // Slab indices of freed slots available for recycling.
    free: Vec<usize>,
    // Most recently used node (hot end), if any.
    head: Option<usize>,
    // Least recently used node (cold end); the eviction candidate.
    tail: Option<usize>,
    // Maximum number of live nodes before an insert evicts the coldest.
    capacity: usize,
}
17
/// Slab node for [`IntrusiveLru`]: the stored entry plus intrusive links.
struct Node<K, V> {
    key: K,
    value: V,
    // Slab index of the hotter neighbor (toward `head`); `None` at the head.
    prev: Option<usize>,
    // Slab index of the colder neighbor (toward `tail`); `None` at the tail.
    next: Option<usize>,
    // False once the slot has been freed and pushed onto the free list.
    active: bool,
}
25
26impl<K, V> IntrusiveLru<K, V>
27where
28    K: std::hash::Hash + Eq + Copy,
29    V: Default,
30{
31    /// Create an LRU with the default live-node capacity.
32    #[inline]
33    pub fn new() -> Self {
34        Self::with_capacity(DEFAULT_INTRUSIVE_LRU_CAPACITY)
35    }
36
37    /// Create an LRU with a fixed live-node capacity.
38    ///
39    /// # Panics
40    ///
41    /// Panics if `capacity` is zero.
42    #[inline]
43    pub fn with_capacity(capacity: usize) -> Self {
44        assert!(
45            capacity > 0,
46            "IntrusiveLru capacity must be non-zero. Fix: configure at least one cache slot."
47        );
48        Self {
49            nodes: Vec::with_capacity(capacity.min(1024)),
50            indices: FxHashMap::default(),
51            free: Vec::new(),
52            head: None,
53            tail: None,
54            capacity,
55        }
56    }
57
58    /// Ensure a node exists for `key` and return a mutable value reference.
59    #[inline]
60    pub fn ensure(&mut self, key: K) -> &mut V {
61        if let Some(&index) = self.indices.get(&key) {
62            return &mut self.nodes[index].value;
63        }
64        let index = self.alloc_node(key);
65        &mut self.nodes[index].value
66    }
67
68    /// Move `key` to the front if it is present.
69    #[inline]
70    pub fn touch(&mut self, key: K) {
71        if let Some(&index) = self.indices.get(&key) {
72            self.move_to_front(index);
73        }
74    }
75
76    /// Remove a key if it is present.
77    #[inline]
78    pub fn remove(&mut self, key: &K) {
79        let Some(index) = self.indices.remove(key) else {
80            return;
81        };
82        self.detach(index);
83        let node = &mut self.nodes[index];
84        node.active = false;
85        self.free.push(index);
86    }
87
88    /// Return the value for `key` if it is currently active.
89    #[inline]
90    pub fn get(&self, key: &K) -> Option<&V> {
91        let &index = self.indices.get(key)?;
92        let node = &self.nodes[index];
93        node.active.then_some(&node.value)
94    }
95
96    /// Return the `n` hottest keys in most-recent-first order.
97    #[inline]
98    pub fn hottest(&self, n: usize) -> Vec<K> {
99        self.iter_hottest().map(|(key, _)| *key).take(n).collect()
100    }
101
102    /// Iterate entries from most recent to least recent.
103    #[inline]
104    pub fn iter_hottest(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
105        let mut current = self.head;
106        std::iter::from_fn(move || {
107            let index = current?;
108            let node = &self.nodes[index];
109            current = node.next;
110            Some((&node.key, &node.value))
111        })
112    }
113
114    /// Iterate entries from least recent to most recent.
115    #[inline]
116    pub fn iter_coldest(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
117        let mut current = self.tail;
118        std::iter::from_fn(move || {
119            let index = current?;
120            let node = &self.nodes[index];
121            current = node.prev;
122            Some((&node.key, &node.value))
123        })
124    }
125
126    fn alloc_node(&mut self, key: K) -> usize {
127        if self.indices.len() == self.capacity {
128            if let Some(coldest) = self.tail {
129                let evicted_key = self.nodes[coldest].key;
130                self.remove(&evicted_key);
131            }
132        }
133        let index = if let Some(index) = self.free.pop() {
134            self.nodes[index] = Node {
135                key,
136                value: V::default(),
137                prev: None,
138                next: None,
139                active: true,
140            };
141            index
142        } else {
143            self.nodes.push(Node {
144                key,
145                value: V::default(),
146                prev: None,
147                next: None,
148                active: true,
149            });
150            self.nodes.len() - 1
151        };
152        self.indices.insert(key, index);
153        self.attach_front(index);
154        index
155    }
156
157    fn move_to_front(&mut self, index: usize) {
158        if self.head == Some(index) {
159            return;
160        }
161        self.detach(index);
162        self.attach_front(index);
163    }
164
165    fn attach_front(&mut self, index: usize) {
166        self.nodes[index].prev = None;
167        self.nodes[index].next = self.head;
168        if let Some(head) = self.head {
169            self.nodes[head].prev = Some(index);
170        } else {
171            self.tail = Some(index);
172        }
173        self.head = Some(index);
174    }
175
176    fn detach(&mut self, index: usize) {
177        let prev = self.nodes[index].prev;
178        let next = self.nodes[index].next;
179        if let Some(prev) = prev {
180            self.nodes[prev].next = next;
181        } else if self.head == Some(index) {
182            self.head = next;
183        }
184        if let Some(next) = next {
185            self.nodes[next].prev = prev;
186        } else if self.tail == Some(index) {
187            self.tail = prev;
188        }
189        self.nodes[index].prev = None;
190        self.nodes[index].next = None;
191    }
192}
193
194impl<K, V> Default for IntrusiveLru<K, V>
195where
196    K: std::hash::Hash + Eq + Copy,
197    V: Default,
198{
199    fn default() -> Self {
200        Self::new()
201    }
202}
203
/// Metadata attached to each LRU node inside [`AccessTracker`].
///
/// `Default` (all zeros) is the state of a key that has never been recorded.
#[derive(Debug, Clone, Copy, Default)]
pub struct AccessMeta {
    /// Number of recorded accesses (saturating).
    pub frequency: u32,
    /// Entry size in bytes.
    pub size: u64,
    /// Monotonic tick recorded for the last access.
    pub last_access: u64,
}
214
/// Tracks access patterns for cache entries.
#[non_exhaustive]
pub struct AccessTracker {
    // key -> AccessMeta in recency order, hottest first.
    lru: IntrusiveLru<u64, AccessMeta>,
    // Monotonic access counter (saturating); stamped into `last_access`.
    tick: u64,
}
221
222impl AccessTracker {
223    /// Create a new empty tracker.
224    #[inline]
225    pub fn new() -> Self {
226        Self {
227            lru: IntrusiveLru::new(),
228            tick: 0,
229        }
230    }
231
232    /// Record an access for the given key.
233    #[inline]
234    pub fn record(&mut self, key: u64) {
235        self.tick = self.tick.saturating_add(1);
236        let meta = self.lru.ensure(key);
237        meta.frequency = meta.frequency.saturating_add(1);
238        meta.last_access = self.tick;
239        self.lru.touch(key);
240    }
241
242    /// Return the `n` hottest keys in most-recent-first order.
243    #[inline]
244    pub fn hot_set(&self, n: usize) -> Vec<u64> {
245        self.lru.hottest(n)
246    }
247
248    #[inline]
249    pub(crate) fn set_size(&mut self, key: u64, size: u64) {
250        self.lru.ensure(key).size = size;
251    }
252
253    #[inline]
254    pub(crate) fn remove(&mut self, key: u64) {
255        self.lru.remove(&key);
256    }
257
258    #[inline]
259    pub(crate) fn iter_coldest(&self) -> impl Iterator<Item = (&u64, &AccessMeta)> + '_ {
260        self.lru.iter_coldest()
261    }
262
263    #[inline]
264    pub(crate) fn get_meta(&self, key: u64) -> Option<&AccessMeta> {
265        self.lru.get(&key)
266    }
267
268    /// Return access statistics for a key.
269    #[inline]
270    pub fn stats(&self, key: u64) -> Option<crate::runtime::cache::AccessStats> {
271        let meta = self.get_meta(key)?;
272        // Fixes architecture_deep_audit.md#27 verification fallout: promotion
273        // policy reads one typed snapshot instead of reaching through LRU internals.
274        let recency_rank = self
275            .lru
276            .iter_hottest()
277            .position(|(candidate, _)| *candidate == key)?;
278        Some(crate::runtime::cache::AccessStats {
279            frequency: meta.frequency,
280            recency_rank,
281            size: meta.size,
282        })
283    }
284}
285
286impl Default for AccessTracker {
287    fn default() -> Self {
288        Self::new()
289    }
290}