texture_cache/lib.rs
//! An LRU texture cache for caching many small textures in a single big texture that is stored on
//! the GPU. This is used to cache textures that are rendered at runtime and change constantly,
//! such as glyphs in text rendering.
//!
//! ## Usage
//!
//! Create a [`LruTextureCache`] and add rects by passing a mutable slice of [`RectEntry`] to the
//! method [`cache_rects`](LruTextureCache::cache_rects). A stored [`Rect`] can be queried from the
//! cache by passing its `key` to the method [`get_rect`](LruTextureCache::get_rect). `Rect` and
//! `RectEntry` can contain arbitrary data that could be useful for rendering from/to the texture
//! cache.
//!
//! After passing the slice to `cache_rects`, it will be reordered so that it starts with every
//! rect that was added to the cache. See [`LruTextureCache::cache_rects`] for details.
//!
//! ## Example
//!
//! ```rust
//! # fn main() -> Result<(), texture_cache::CacheErr> {
//! use texture_cache::{LruTextureCache, RectEntry};
//!
//! let mut rects = vec![RectEntry {
//!     width: 20,
//!     height: 20,
//!     key: "my_rect",
//!     value: (),
//!     entry_data: (),
//! }];
//! let mut cache = LruTextureCache::new(256, 256);
//! let result = cache.cache_rects(&mut rects)?;
//!
//! for rect in &rects[0..result.len()] {
//!     let cached_rect = cache.get_rect(&rect.key);
//!     // Draw the rect to the texture
//! }
//! # Ok(())
//! # }
//! ```

#![warn(missing_docs)]

use std::collections::HashMap;
use std::hash::Hash;

#[cfg(test)]
mod test {
    use std::fmt::Debug;

    use super::*;

    fn check_overlaps<K: Hash + Eq + Debug, V>(cache: &LruTextureCache<K, V>) {
        let rects = cache.rows.iter().flat_map(|x| x.rects.iter()).enumerate();
        let overlap = |a: &Rect<V>, b: &Rect<V>| {
            a.x < b.x + b.width
                && b.x < a.x + a.width
                && a.y < b.y + b.height
                && b.y < a.y + a.height
        };
        for (x, this) in rects.clone() {
            if this.x + this.width > cache.width || this.y + this.height > cache.height {
                panic!("rect overflow");
            }
            for (y, other) in rects.clone() {
                if x != y {
                    assert!(!overlap(this, other), "detected overlap");
                }
            }
        }

        let mut values = cache.key_to_row.values().collect::<Vec<_>>();
        let len = values.len();
        values.sort();
        println!("{:?}", values);
        values.dedup();
        assert_eq!(values.len(), len, "{:?}", cache.key_to_row);
    }

    #[test]
    fn too_big() {
        let mut cache = LruTextureCache::new(100, 100);
        assert_eq!(cache.add_rect(150, 50, 0, ()), AddRectResult::TooLarge);
        assert_eq!(cache.add_rect(50, 150, 1, ()), AddRectResult::TooLarge);
        assert_eq!(cache.add_rect(101, 0, 0, ()), AddRectResult::TooLarge);
    }

    #[test]
    fn row_count() {
        let mut cache = LruTextureCache::new(100, 100);
        let mut counter = -1;
        let mut rects = move || {
            (0..10)
                .map(|_| {
                    counter += 1;
                    RectEntry {
                        width: 10,
                        height: 10,
                        key: counter,
                        value: (),
                        entry_data: (),
                    }
                })
                .collect::<Vec<_>>()
        };

        for i in 0..10 {
            let result = cache.cache_rects(&mut rects());
            assert_eq!(result, Ok(Cached::Added(10)));
            check_overlaps(&cache);
            assert_eq!(cache.rows.len(), i + 1);
        }

        let result = cache.cache_rects(&mut rects());
        assert_eq!(result, Ok(Cached::Changed(10)));
        check_overlaps(&cache);
        assert_eq!(cache.rows.len(), 10);

        for i in 0..10 {
            assert!(!cache.contains(&i));
        }
        for i in 10..110 {
            assert!(cache.contains(&i));
        }
    }

    #[test]
    fn random_sample() {
        use rand::prelude::*;
        let seed = thread_rng().gen::<u64>();
        println!("set seed {:016x}", seed);
        let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
        let mut gen = || rng.gen_range(1..=10) + rng.gen_range(1..=10);
        let rects: Vec<_> = (0..1000)
            .map(|x| RectEntry {
                width: gen(),
                height: gen(),
                key: x,
                value: (),
                entry_data: (),
            })
            .collect();
        let mut cache = LruTextureCache::new(100, 100);
        for i in 0..200 {
            println!("sample number {}", i);
            let size = rng.gen_range(25..100);
            let mut sample = rects.iter().cloned().choose_multiple(&mut rng, size);
            if cache.cache_rects(&mut sample).is_ok() {
                for rect in sample.iter() {
                    assert!(cache.contains(&rect.key));
                }
            }
            check_overlaps(&cache);
        }
    }

    /// This test is only used to repeat the `random_sample` test, but with a predetermined seed.
    #[test]
    #[ignore]
    fn seed_sample() {
        use rand::prelude::*;
        let seed = 0xe850b0f0dae9bbd6;
        println!("set seed {:016x}", seed);
        let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
        let mut gen = || rng.gen_range(1..=10) + rng.gen_range(1..=10);
        let rects: Vec<_> = (0..1000)
            .map(|x| RectEntry {
                width: gen(),
                height: gen(),
                key: x,
                value: (),
                entry_data: (),
            })
            .collect();
        let mut cache = LruTextureCache::new(100, 100);
        for i in 0..2 {
            println!("sample number {}", i);
            let size = rng.gen_range(25..100);
            let mut sample = rects.iter().cloned().choose_multiple(&mut rng, size);
            if cache.cache_rects(&mut sample).is_ok() {
                for rect in sample.iter() {
                    assert!(cache.contains(&rect.key));
                }
            }
            check_overlaps(&cache);
        }
    }

    #[test]
    fn multiple_size() {
        let mut cache = LruTextureCache::new(100, 100);
        let i = std::sync::atomic::AtomicU64::new(0);
        let count = move || i.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let rects = (0..11)
            .map(|_| RectEntry {
                width: 15,
                height: 15,
                key: count(),
                value: (),
                entry_data: (),
            })
            .chain((0..3).map(|_| RectEntry {
                width: 10,
                height: 10,
                key: count(),
                value: (),
                entry_data: (),
            }));
        let mut rects: Vec<_> = rects.collect();
        cache.cache_rects(&mut rects).unwrap();
        println!(
            "{:?}",
            cache.rows.iter().map(|x| &x.rects).collect::<Vec<_>>()
        );
    }
}

/// The stored Rect. Contains its dimensions, position and the associated value.
#[derive(Clone, Debug)]
pub struct Rect<V> {
    /// The horizontal position, in pixels, of the top left corner of the rect in the texture. 0 is
    /// the left edge of the texture.
    pub x: u32,
    /// The vertical position, in pixels, of the top left corner of the rect in the texture. 0 is
    /// the top edge of the texture.
    pub y: u32,
    /// The width of the rect, in pixels.
    pub width: u32,
    /// The height of the rect, in pixels.
    pub height: u32,
    /// The value associated with this rect.
    pub value: V,
}

struct Row<V> {
    /// The age of the row since the last time it was used to cache a rect. Increased by one on
    /// every call to `cache_rects`, and reset to 0 when the row is used by that call.
    age: u8,
    /// The position of the top of the row.
    top: u32,
    /// The height of the row.
    height: u32,
    /// The space that is not occupied to the right of the last stored rect.
    free_space: u32,
    /// The rects stored in this row.
    pub rects: Vec<Rect<V>>,
}
impl<V> Row<V> {
    pub fn push_rect(&mut self, width: u32, height: u32, value: V) {
        let y = self.top;
        let x = self.rects.last().map_or(0, |x| x.x + x.width);
        let rect = Rect {
            x,
            y,
            width,
            height,
            value,
        };
        self.free_space -= rect.width;
        self.rects.push(rect);
    }

    fn len(&self) -> usize {
        self.rects.len()
    }
}

/// The entry of a rect to be cached.
#[derive(Clone, Hash, PartialEq, Eq)]
pub struct RectEntry<K: Hash + Eq + Clone, V: Clone, D> {
    /// The width of the rect to be cached.
    pub width: u32,
    /// The height of the rect to be cached.
    pub height: u32,
    /// The key that will be mapped to the cached rect.
    pub key: K,
    /// A value which will be associated with the cached rect.
    pub value: V,
    /// A value that will be associated with this rect entry. This is not stored in the cache, but
    /// it is useful for doing operations with this entry right after adding it.
    pub entry_data: D,
}

type RowIndex = u32;
type RectIndex = u32;

/// The successful result of caching the queued rects, returned from `cache_rects`.
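///
/// A sketch of how a caller might react to each variant; the `draw_rects` helper here is
/// illustrative and not part of this crate:
///
/// ```rust
/// # use texture_cache::Cached;
/// # fn draw_rects(_count: usize) {}
/// # let result = Cached::Added(3);
/// match result {
///     // only the newly added rects need to be drawn
///     Cached::Added(added) => draw_rects(added),
///     // the new rects need to be drawn, and some rects cached by previous calls
///     // may have been evicted
///     Cached::Changed(added) => draw_rects(added),
///     // the whole cache was rebuilt, so every rect it now contains must be drawn
///     Cached::Cleared(total) => draw_rects(total),
/// }
/// ```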
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum Cached {
    /// Added some rects to the cache without affecting any of the already cached rects from
    /// previous queues. Contains the number of rects that were added to the cache.
    Added(usize),
    /// Added some rects to the cache, but removed some rects from previous queues. Contains the
    /// number of rects that were added to the cache.
    Changed(usize),
    /// Added all rects to the cache by clearing all rects from previous queues. Contains the
    /// number of rects contained in the cache.
    Cleared(usize),
}
impl Cached {
    /// Return the number of rects that were added to the cache.
    ///
    /// This includes rects that were already cached and had to be re-added, in the case of
    /// [`Cached::Cleared`].
    pub fn len(&self) -> usize {
        match *self {
            Self::Added(len) => len,
            Self::Changed(len) => len,
            Self::Cleared(len) => len,
        }
    }
}

/// The reason a call to `cache_rects` failed.
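///
/// For example (a minimal sketch, with arbitrary sizes), a rect that is bigger than the whole
/// texture can never be cached:
///
/// ```rust
/// use texture_cache::{CacheErr, LruTextureCache, RectEntry};
///
/// let mut cache = LruTextureCache::new(64, 64);
/// let mut rects = vec![RectEntry {
///     width: 100,
///     height: 100,
///     key: 0u32,
///     value: (),
///     entry_data: (),
/// }];
/// assert_eq!(cache.cache_rects(&mut rects), Err(CacheErr::RectTooLarge(0)));
/// ```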
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum CacheErr {
    /// At least one of the queued rects is too big to fit in the cache by itself. Contains the
    /// number of rects that were added to the cache.
    RectTooLarge(usize),
    /// Not all of the requested rects fit in the cache, even after clearing all previously cached
    /// rects. Contains the number of rects that were added to the cache.
    DontFit(usize),
}
impl CacheErr {
    /// Return the number of rects that were added to the cache.
    pub fn len(&self) -> usize {
        match *self {
            Self::RectTooLarge(len) => len,
            Self::DontFit(len) => len,
        }
    }
}

#[derive(PartialEq, Eq, Debug)]
enum AddRectResult {
    Added,
    ClearedRows,
    TooLarge,
    NoRowToFit,
}

/// An LRU texture cache for caching rects in a texture.
///
/// ## Algorithm
///
/// The texture is divided into rows. When a rect is added, the cache checks whether it fits in
/// any of the existing rows. If not, a new row is created with the height of the added rect. If
/// there is no space for new rows, a range of unused rows whose combined height fits the rect is
/// removed, and a new row that fits the rect takes its place. If no such range is found, the
/// entire cache is cleared.
///
/// When adding multiple rects, all rows that contain any of the already added rects are marked as
/// used, and the remaining rows are the unused ones.
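///
/// The example below is a minimal sketch of this behavior, observed only through the public API
/// (the sizes and keys are arbitrary): rects short enough for an existing row share it, while a
/// taller rect starts a new one.
///
/// ```rust
/// # fn main() -> Result<(), texture_cache::CacheErr> {
/// use texture_cache::{LruTextureCache, RectEntry};
///
/// let mut cache = LruTextureCache::new(100, 100);
/// let entry = |key, size| RectEntry { width: size, height: size, key, value: (), entry_data: () };
///
/// cache.cache_rects(&mut [entry(0u32, 10)])?; // creates a row of height 10
/// assert_eq!(cache.min_height(), 10);
/// cache.cache_rects(&mut [entry(1, 20)])?; // too tall for the first row, creates a new one
/// assert_eq!(cache.min_height(), 30);
/// cache.cache_rects(&mut [entry(2, 10)])?; // fits in the first row, no new row is created
/// assert_eq!(cache.min_height(), 30);
/// # Ok(())
/// # }
/// ```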
pub struct LruTextureCache<K: Hash + Eq, V> {
    /// The width of the texture.
    width: u32,
    /// The height of the texture.
    height: u32,
    /// Allocated rows in the cache.
    rows: Vec<Row<V>>,
    /// A map from a rect key to the row and index where it is stored.
    key_to_row: HashMap<K, (RowIndex, RectIndex)>,
    /// Free space below the bottom row.
    free_space: u32,
}
impl<K: Hash + Eq + Copy + 'static, V: Clone> LruTextureCache<K, V> {
    /// Create a new empty cache, with the given width and height.
    pub fn new(width: u32, height: u32) -> Self {
        Self {
            width,
            height,
            rows: Default::default(),
            key_to_row: Default::default(),
            free_space: height,
        }
    }

    /// The width of the cache.
    pub fn width(&self) -> u32 {
        self.width
    }

    /// The height of the cache.
    pub fn height(&self) -> u32 {
        self.height
    }

    /// The height of the occupied area of the cache.
    ///
    /// Cached rects are placed starting at the top of the texture and grow toward the bottom;
    /// this is the position of the bottom of the lowest allocated row.
    pub fn min_height(&self) -> u32 {
        self.height - self.free_space
    }

    /// The number of rects currently in the cache.
    pub fn len(&self) -> usize {
        self.key_to_row.len()
    }

    /// Return true if there is no rect in the cache.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the cache, leaving it as if it were new.
    pub fn clear(&mut self) {
        self.free_space = self.height;
        self.rows.clear();
        self.key_to_row.clear();
    }

    /// Return true if there is a cached rect associated with the given key. Otherwise, return
    /// false.
    pub fn contains(&self, key: &K) -> bool {
        self.key_to_row.contains_key(key)
    }

    /// Partition the given slice into uncached and cached rects, and return the index of the
    /// partition point. Uncached rects get a dummy value added to the `key_to_row` map.
    fn partition_cached<D>(&mut self, rects: &mut [RectEntry<K, V, D>]) -> usize {
        fn partition<T, F: FnMut(&T) -> bool>(slice: &mut [T], mut f: F) -> usize {
            let mut l = 0;
            for i in 0..slice.len() {
                if f(&slice[i]) {
                    slice.swap(i, l);
                    l += 1;
                }
            }
            l
        }
        partition(rects, |x| {
            if !self.key_to_row.contains_key(&x.key) {
                // insert with a dummy value; this is replaced later when the rect is added.
                self.key_to_row.insert(x.key, (!0, !0));
                true
            } else {
                false
            }
        })
    }

    /// Cache the given slice of rects.
    ///
    /// The given slice of rects is reordered so that it starts with every rect that was added to
    /// the cache. This way you can iterate over them by subslicing `rects` (`rects[0..len]`,
    /// where `len` is [`Cached::len`] or [`CacheErr::len`]) and draw them to the texture cache.
    ///
    /// If this returns `Ok`, it is guaranteed that each one of the given rects is present in the
    /// cache after this call, but any rects from previous calls could have been removed.
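    ///
    /// A minimal sketch of that pattern (the `draw_to_texture` helper stands in for whatever
    /// upload or rendering you do; it is not part of this crate):
    ///
    /// ```rust
    /// # fn main() -> Result<(), texture_cache::CacheErr> {
    /// # use texture_cache::{LruTextureCache, RectEntry};
    /// # fn draw_to_texture(_key: &str, _x: u32, _y: u32) {}
    /// let mut cache = LruTextureCache::new(128, 128);
    /// let mut rects = vec![
    ///     RectEntry { width: 16, height: 16, key: "a", value: (), entry_data: () },
    ///     RectEntry { width: 32, height: 16, key: "b", value: (), entry_data: () },
    /// ];
    /// let cached = cache.cache_rects(&mut rects)?;
    /// // only the first `cached.len()` entries were added by this call and need to be drawn
    /// for entry in &rects[..cached.len()] {
    ///     let rect = cache.get_rect(&entry.key).unwrap();
    ///     draw_to_texture(entry.key, rect.x, rect.y);
    /// }
    /// # Ok(())
    /// # }
    /// ```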
    #[must_use]
    pub fn cache_rects<D>(&mut self, rects: &mut [RectEntry<K, V, D>]) -> Result<Cached, CacheErr> {
        // Partition the slice into uncached and cached. Be aware of the dummy values.
        let s = self.partition_cached(rects);
        // sort uncached by decreasing height
        rects[..s].sort_unstable_by_key(|x| !x.height);

        // Update the `age` of the rows for this batch. Uncached rects will mark the rows when
        // they are added.
        for row in &mut self.rows {
            row.age = row.age.saturating_add(1);
        }
        for rect in &rects[s..] {
            let &(row, _) = self.key_to_row.get(&rect.key).unwrap();
            // be careful with dummy values
            if row == !0 {
                continue;
            }
            self.rows[row as usize].age = 0;
        }

        // try to add all rects to the cache
        enum Success {
            Okay,
            Change,
            Fail,
        }
        use AddRectResult::*;
        use Success::*;
        let mut success = Okay;
        for (r, rect) in rects[..s].iter().enumerate() {
            match self.add_rect(rect.width, rect.height, rect.key, rect.value.clone()) {
                Added => {}
                ClearedRows => success = Change,
                NoRowToFit => {
                    success = Fail;
                    break;
                }
                TooLarge => {
                    // clear dummy key_to_row values
                    for rect in &rects[r..s] {
                        self.key_to_row.remove(&rect.key);
                    }
                    return Err(CacheErr::RectTooLarge(r));
                }
            }
        }

        match success {
            Okay => Ok(Cached::Added(s)),
            Change => Ok(Cached::Changed(s)),

            // if the rects don't fit in the cache, clear everything and try again
            Fail => {
                self.clear();
                // partition the slice into uncached and cached
                let s = self.partition_cached(rects);
                // sort uncached by decreasing height
                rects[..s].sort_unstable_by_key(|x| !x.height);
                for (r, rect) in rects[..s].iter().enumerate() {
                    match self.add_rect(rect.width, rect.height, rect.key, rect.value.clone()) {
                        Added => {}
                        ClearedRows => unreachable!("there are no unused rows"),
                        TooLarge => unreachable!("too large rects would have failed already"),
                        NoRowToFit => {
                            // clear dummy key_to_row values
                            for rect in &rects[r..s] {
                                self.key_to_row.remove(&rect.key);
                            }
                            return Err(CacheErr::DontFit(r));
                        }
                    }
                }
                Ok(Cached::Cleared(s))
            }
        }
    }

    /// Return the [`Rect`] where the texture associated with the given key is stored, or `None`
    /// if the key is not in the cache.
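    ///
    /// A sketch of turning the returned [`Rect`] into normalized texture coordinates; this is
    /// just one possible convention, not something the crate requires:
    ///
    /// ```rust
    /// # fn main() -> Result<(), texture_cache::CacheErr> {
    /// # use texture_cache::{LruTextureCache, RectEntry};
    /// # let mut cache = LruTextureCache::new(256, 256);
    /// # let mut rects = vec![RectEntry { width: 20, height: 20, key: "my_rect", value: (), entry_data: () }];
    /// # cache.cache_rects(&mut rects)?;
    /// if let Some(rect) = cache.get_rect(&"my_rect") {
    ///     let u0 = rect.x as f32 / cache.width() as f32;
    ///     let v0 = rect.y as f32 / cache.height() as f32;
    ///     let u1 = (rect.x + rect.width) as f32 / cache.width() as f32;
    ///     let v1 = (rect.y + rect.height) as f32 / cache.height() as f32;
    ///     assert!(u0 <= u1 && v0 <= v1);
    /// }
    /// # Ok(())
    /// # }
    /// ```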
    pub fn get_rect(&self, key: &K) -> Option<Rect<V>> {
        let (row, index) = *self.key_to_row.get(key)?;
        Some(self.rows[row as usize].rects[index as usize].clone())
    }

    /// Add a rect to a row in the cache, and mark that row as used. This can deallocate any row
    /// that is not marked as used.
    fn add_rect(&mut self, width: u32, height: u32, key: K, value: V) -> AddRectResult {
        if width > self.width || height > self.height {
            return AddRectResult::TooLarge;
        }
        // put it in the first row that fits
        for (r, row) in self.rows.iter_mut().enumerate() {
            if row.height >= height && row.free_space >= width {
                // reborrow because of lifetime issues
                let row = &mut self.rows[r];
                row.age = 0;
                self.key_to_row
                    .insert(key, (r as RowIndex, row.len() as RectIndex));
                row.push_rect(width, height, value);
                return AddRectResult::Added;
            }
        }

        // if it doesn't fit in any row, add a new one
        if self.free_space >= height {
            self.free_space -= height;
            let mut row = Row {
                age: 0,
                top: self.rows.last().map_or(0, |x| x.top + x.height),
                height,
                free_space: self.width,
                rects: Vec::new(),
            };
            self.key_to_row
                .insert(key, (self.rows.len() as RowIndex, 0 as RectIndex));
            row.push_rect(width, height, value);
            self.rows.push(row);
            return AddRectResult::Added;
        }

        // if there is no space for new rows, clear unused ones to fit the new rect

        // find the best range of consecutive unused rows that fits the rect.
        // older is better.
        let mut possible_row = None;
        let mut best = 0;
        for r in 0..self.rows.len() {
            let mut rows_height = 0;
            // the age of the youngest row in the range
            let mut age = !0;
            for o in r..self.rows.len() {
                let row = &self.rows[o];
                if row.age == 0 {
                    break;
                }
                if row.age < age {
                    if row.age <= best {
                        break;
                    }
                    age = row.age;
                }
                rows_height += row.height;
                if rows_height >= height {
                    possible_row = Some((r..o + 1, rows_height));
                    best = age;
                    break;
                }
            }
        }
        if let Some((range, row_height)) = possible_row {
            // TODO: don't clear all the rows in the range; keep the rects from one of them.
            let top = self.rows[range.start].top;
            let mut new_row = Row {
                age: 0,
                top,
                height,
                free_space: self.width,
                rects: Vec::new(),
            };
            new_row.push_rect(width, height, value);
            let add_len = if row_height == height {
                self.rows.splice(range.clone(), std::iter::once(new_row));
                1
            } else {
                // create a second row, to fill the gap
                let split_row = Row {
                    age: !0,
                    top: top + height,
                    height: row_height - height,
                    free_space: self.width,
                    rects: Vec::new(),
                };
                self.rows
                    .splice(range.clone(), IntoIterator::into_iter([new_row, split_row]));
                2
            };

            // remove and shift rows in the key_to_row hash map
            self.key_to_row.retain(|_, (row, _)| {
                if (*row as usize) < range.start {
                    true
                } else if (*row as usize) >= range.end {
                    if *row != !0 {
                        *row = *row + add_len - range.len() as u32;
                    }
                    true
                } else {
                    false
                }
            });
            self.key_to_row
                .insert(key, (range.start as RowIndex, 0 as RectIndex));
            return AddRectResult::ClearedRows;
        }

        // no suitable range of rows was found, so the operation failed
        AddRectResult::NoRowToFit
    }
}