use crate::LruCache;
use crate::entry::EntryPtr;

use std::iter::FusedIterator;
use std::marker::PhantomData;

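/// An iterator over borrowed key-value pairs of an [LruCache], created by
/// [LruCache::iter]. The two pointers walk the cache's internal entry list
/// from both ends; a null `next` pointer serves as the shared exhaustion
/// marker, so once the front and back cursors meet, both `next` and
/// `next_back` return `None`.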
pub struct Iter<'a, K, V> {
    next: EntryPtr<K, V>,
    next_back: EntryPtr<K, V>,
    lifetime: PhantomData<&'a ()>
}

impl<'a, K, V> Iter<'a, K, V> {
    pub(crate) fn new<S>(cache: &LruCache<K, V, S>) -> Iter<K, V> {
        if cache.is_empty() {
            Iter {
                next: unsafe { EntryPtr::null() },
                next_back: unsafe { EntryPtr::null() },
                lifetime: PhantomData
            }
        }
        else {
            Iter {
                next: cache.seal.get().prev,
                next_back: cache.seal.get().next,
                lifetime: PhantomData
            }
        }
    }
}

impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        if self.next.is_null() {
            None
        }
        else {
            let entry = unsafe { self.next.get_extended() };

            if self.next == self.next_back {
                self.next = unsafe { EntryPtr::null() };
            }
            else {
                self.next = entry.prev;
            }

            unsafe { Some((entry.key(), entry.value())) }
        }
    }
}

impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
        if self.next.is_null() {
            None
        }
        else {
            let entry = unsafe { self.next_back.get_extended() };

            if self.next_back == self.next {
                self.next = unsafe { EntryPtr::null() };
            }
            else {
                self.next_back = entry.next;
            }

            unsafe { Some((entry.key(), entry.value())) }
        }
    }
}

impl<'a, K: 'a, V: 'a> FusedIterator for Iter<'a, K, V> { }

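/// An iterator over borrowed keys of an [LruCache], in the same order as
/// [Iter]. Created by [LruCache::keys].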
pub struct Keys<'a, K, V> {
    iter: Iter<'a, K, V>
}

impl<'a, K, V> Keys<'a, K, V> {
    pub(crate) fn new<S>(cache: &'a LruCache<K, V, S>) -> Keys<'a, K, V> {
        Keys {
            iter: Iter::new(cache)
        }
    }
}

impl<'a, K: 'a, V: 'a> Iterator for Keys<'a, K, V> {
    type Item = &'a K;

    fn next(&mut self) -> Option<&'a K> {
        self.iter.next().map(|(k, _)| k)
    }
}

impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Keys<'a, K, V> {
    fn next_back(&mut self) -> Option<&'a K> {
        self.iter.next_back().map(|(k, _)| k)
    }
}

impl<'a, K: 'a, V: 'a> FusedIterator for Keys<'a, K, V> { }

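/// An iterator over borrowed values of an [LruCache], in the same order as
/// [Iter]. Created by [LruCache::values].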
pub struct Values<'a, K, V> {
    iter: Iter<'a, K, V>
}

impl<'a, K, V> Values<'a, K, V> {
    pub(crate) fn new<S>(cache: &'a LruCache<K, V, S>) -> Values<'a, K, V> {
        Values {
            iter: Iter::new(cache)
        }
    }
}

impl<'a, K: 'a, V: 'a> Iterator for Values<'a, K, V> {
    type Item = &'a V;

    fn next(&mut self) -> Option<&'a V> {
        self.iter.next().map(|(_, v)| v)
    }
}

impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Values<'a, K, V> {
    fn next_back(&mut self) -> Option<&'a V> {
        self.iter.next_back().map(|(_, v)| v)
    }
}

impl<'a, K: 'a, V: 'a> FusedIterator for Values<'a, K, V> { }

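// Internal iterator that moves entries out of the cache's entry list by value
// via `EntryPtr::read`. The wrapping iterators (`Drain` and `IntoIter`) are
// responsible for clearing the cache afterwards so that moved-out entries are
// not dropped a second time.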
struct TakingIterator<K, V> {
    next: EntryPtr<K, V>,
    next_back: EntryPtr<K, V>,
}

impl<K, V> TakingIterator<K, V> {
    fn new<S>(cache: &LruCache<K, V, S>) -> TakingIterator<K, V> {
        if cache.is_empty() {
            TakingIterator {
                next: unsafe { EntryPtr::null() },
                next_back: unsafe { EntryPtr::null() }
            }
        }
        else {
            TakingIterator {
                next: cache.seal.get().prev,
                next_back: cache.seal.get().next
            }
        }
    }
}

impl<K, V> Iterator for TakingIterator<K, V> {
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        if self.next.is_null() {
            None
        }
        else {
            unsafe {
                let entry = self.next.read();

                if self.next == self.next_back {
                    self.next = EntryPtr::null();
                }
                else {
                    self.next = entry.prev;
                }

                Some(entry.into_key_value())
            }
        }
    }
}

impl<K, V> DoubleEndedIterator for TakingIterator<K, V> {
    fn next_back(&mut self) -> Option<(K, V)> {
        if self.next.is_null() {
            None
        }
        else {
            unsafe {
                let entry = self.next_back.read();

                if self.next_back == self.next {
                    self.next = EntryPtr::null();
                }
                else {
                    self.next_back = entry.next;
                }

                Some(entry.into_key_value())
            }
        }
    }
}

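/// A draining iterator that removes the entries of an [LruCache] and yields
/// them as owned key-value pairs, created by [LruCache::drain]. When the
/// iterator is dropped, all remaining entries are removed and dropped, leaving
/// the cache empty.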
pub struct Drain<'a, K, V, S> {
    iterator: TakingIterator<K, V>,
    cache: &'a mut LruCache<K, V, S>
}

impl<'a, K, V, S> Drain<'a, K, V, S> {
    pub(crate) fn new(cache: &'a mut LruCache<K, V, S>) -> Drain<'a, K, V, S> {
        Drain {
            iterator: TakingIterator::new(cache),
            cache
        }
    }
}

impl<'a, K, V, S> Iterator for Drain<'a, K, V, S> {
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        self.iterator.next()
    }
}

impl<'a, K, V, S> DoubleEndedIterator for Drain<'a, K, V, S> {
    fn next_back(&mut self) -> Option<(K, V)> {
        self.iterator.next_back()
    }
}

impl<'a, K, V, S> Drop for Drain<'a, K, V, S> {
    fn drop(&mut self) {
        // Take and drop all remaining entries.
        for _ in self.by_ref() { }

        // Relink the seal to itself so the entry list is empty again.
        self.cache.seal.get_mut().next = self.cache.seal;
        self.cache.seal.get_mut().prev = self.cache.seal;

        self.cache.current_size = 0;
        self.cache.table.clear_no_drop();
    }
}

impl<'a, K, V, S> FusedIterator for Drain<'a, K, V, S> { }

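/// An owning iterator over the key-value pairs of an [LruCache], created by
/// calling `into_iter()` on the cache. Any entries that have not been yielded
/// are dropped together with the iterator.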
pub struct IntoIter<K, V, S> {
    iterator: TakingIterator<K, V>,
    cache: LruCache<K, V, S>
}

impl<K, V, S> IntoIter<K, V, S> {
    pub(crate) fn new(cache: LruCache<K, V, S>) -> IntoIter<K, V, S> {
        IntoIter {
            iterator: TakingIterator::new(&cache),
            cache
        }
    }
}

impl<K, V, S> Iterator for IntoIter<K, V, S> {
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        self.iterator.next()
    }
}

impl<K, V, S> DoubleEndedIterator for IntoIter<K, V, S> {
    fn next_back(&mut self) -> Option<(K, V)> {
        self.iterator.next_back()
    }
}

impl<K, V, S> Drop for IntoIter<K, V, S> {
    fn drop(&mut self) {
        // Take and drop all remaining entries.
        for _ in self.by_ref() { }

        // Clear the table without dropping the already-moved-out entries.
        self.cache.table.clear_no_drop();
    }
}

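/// An owning iterator over the keys of an [LruCache], created by
/// [LruCache::into_keys]. Values are dropped as the keys are yielded;
/// remaining entries are dropped together with the iterator.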
pub struct IntoKeys<K, V, S> {
    into_iter: IntoIter<K, V, S>
}

impl<K, V, S> IntoKeys<K, V, S> {
    pub(crate) fn new(cache: LruCache<K, V, S>) -> IntoKeys<K, V, S> {
        IntoKeys {
            into_iter: IntoIter::new(cache)
        }
    }
}

impl<K, V, S> Iterator for IntoKeys<K, V, S> {
    type Item = K;

    fn next(&mut self) -> Option<K> {
        self.into_iter.next().map(|(k, _)| k)
    }
}

impl<K, V, S> DoubleEndedIterator for IntoKeys<K, V, S> {
    fn next_back(&mut self) -> Option<K> {
        self.into_iter.next_back().map(|(k, _)| k)
    }
}

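/// An owning iterator over the values of an [LruCache], created by
/// [LruCache::into_values]. Keys are dropped as the values are yielded;
/// remaining entries are dropped together with the iterator.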
pub struct IntoValues<K, V, S> {
    into_iter: IntoIter<K, V, S>
}

impl<K, V, S> IntoValues<K, V, S> {
    pub(crate) fn new(cache: LruCache<K, V, S>) -> IntoValues<K, V, S> {
        IntoValues {
            into_iter: IntoIter::new(cache)
        }
    }
}

impl<K, V, S> Iterator for IntoValues<K, V, S> {
    type Item = V;

    fn next(&mut self) -> Option<V> {
        self.into_iter.next().map(|(_, v)| v)
    }
}

impl<K, V, S> DoubleEndedIterator for IntoValues<K, V, S> {
    fn next_back(&mut self) -> Option<V> {
        self.into_iter.next_back().map(|(_, v)| v)
    }
}

#[cfg(test)]
mod tests {

    use std::sync::{Arc, Mutex};

    use crate::{HeapSize, LruCache};
    use crate::tests::{large_test_cache, singleton_test_cache};

    #[test]
    fn iter_works_for_larger_cache() {
        let cache = large_test_cache();
        let mut iter = cache.iter();

        assert_eq!(Some((&"hello", &"world")), iter.next());
        assert_eq!(Some((&"good morning", &"jupiter")), iter.next_back());
        assert_eq!(Some((&"hi", &"venus")), iter.next_back());
        assert_eq!(Some((&"ahoy", &"mars")), iter.next_back());
        assert_eq!(Some((&"greetings", &"moon")), iter.next());
        assert_eq!(None, iter.next_back());
        assert_eq!(None, iter.next());
    }

    #[test]
    fn forward_iter_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut iter = cache.iter();

        assert_eq!(Some((&"hello", &"world")), iter.next());
        assert_eq!(None, iter.next());
    }

    #[test]
    fn backward_iter_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut iter = cache.iter();

        assert_eq!(Some((&"hello", &"world")), iter.next_back());
        assert_eq!(None, iter.next_back());
    }

    #[test]
    fn forward_keys_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut keys = cache.keys();

        assert_eq!(Some(&"hello"), keys.next());
        assert_eq!(None, keys.next());
    }

    #[test]
    fn backward_keys_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut keys = cache.keys();

        assert_eq!(Some(&"hello"), keys.next_back());
        assert_eq!(None, keys.next_back());
    }

    #[test]
    fn forward_values_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut values = cache.values();

        assert_eq!(Some(&"world"), values.next());
        assert_eq!(None, values.next());
    }

    #[test]
    fn backward_values_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut values = cache.values();

        assert_eq!(Some(&"world"), values.next_back());
        assert_eq!(None, values.next_back());
    }

    #[test]
    fn separated_iters_zip_to_pair_iter() {
        let cache = large_test_cache();
        let pair_iter_collected = cache.iter().collect::<Vec<_>>();
        let zip_iter_collected = cache.keys()
            .zip(cache.values())
            .collect::<Vec<_>>();

        assert_eq!(pair_iter_collected, zip_iter_collected);
    }

    #[test]
    fn separated_into_iters_zip_to_pair_into_iter() {
        let cache = large_test_cache();
        let pair_iter_collected =
            cache.clone().into_iter().collect::<Vec<_>>();
        let zip_iter_collected = cache.clone().into_keys()
            .zip(cache.into_values())
            .collect::<Vec<_>>();

        assert_eq!(pair_iter_collected, zip_iter_collected);
    }

    #[test]
    fn drain_clears_cache() {
        let mut cache = LruCache::new(1024);
        cache.insert("hello", "world").unwrap();
        cache.insert("greetings", "moon").unwrap();
        cache.drain().next();

        assert_eq!(0, cache.len());
        assert_eq!(0, cache.current_size());
        assert!(cache.peek_lru().is_none());
        assert!(cache.peek_mru().is_none());
        assert!(cache.get("hello").is_none());
    }

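    // Asserts that an owning iterator over the large test cache yields all
    // five entries, alternating between the front and the back, and is
    // exhausted afterwards. Shared by the `Drain` and `IntoIter` tests below.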
    fn test_owning_iterator<I>(mut iter: I)
    where
        I: Iterator<Item = (&'static str, &'static str)> + DoubleEndedIterator
    {
        assert_eq!(Some(("hello", "world")), iter.next());
        assert_eq!(Some(("good morning", "jupiter")), iter.next_back());
        assert_eq!(Some(("hi", "venus")), iter.next_back());
        assert_eq!(Some(("ahoy", "mars")), iter.next_back());
        assert_eq!(Some(("greetings", "moon")), iter.next());
        assert_eq!(None, iter.next_back());
        assert_eq!(None, iter.next());
    }

    #[test]
    fn drain_returns_entries() {
        let mut cache = large_test_cache();
        let drain = cache.drain();
        test_owning_iterator(drain);
    }

    #[test]
    fn into_iter_returns_entries() {
        let cache = large_test_cache();
        let into_iter = cache.into_iter();
        test_owning_iterator(into_iter);
    }

    #[test]
    fn forward_into_iter_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_iter = cache.into_iter();

        assert_eq!(Some(("hello", "world")), into_iter.next());
        assert_eq!(None, into_iter.next());
    }

    #[test]
    fn backward_into_iter_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_iter = cache.into_iter();

        assert_eq!(Some(("hello", "world")), into_iter.next_back());
        assert_eq!(None, into_iter.next_back());
    }

    #[test]
    fn forward_into_keys_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_keys = cache.into_keys();

        assert_eq!(Some("hello"), into_keys.next());
        assert_eq!(None, into_keys.next());
    }

    #[test]
    fn backward_into_keys_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_keys = cache.into_keys();

        assert_eq!(Some("hello"), into_keys.next_back());
        assert_eq!(None, into_keys.next_back());
    }

    #[test]
    fn forward_into_values_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_values = cache.into_values();

        assert_eq!(Some("world"), into_values.next());
        assert_eq!(None, into_values.next());
    }

    #[test]
    fn backward_into_values_works_for_singleton_cache() {
        let cache = singleton_test_cache();
        let mut into_values = cache.into_values();

        assert_eq!(Some("world"), into_values.next_back());
        assert_eq!(None, into_values.next_back());
    }

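    // A value whose `Drop` implementation increments a shared counter, used to
    // check that the owning iterators drop all remaining entries.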
    #[derive(Debug)]
    struct DropObserver {
        drop_counter: Arc<Mutex<u32>>
    }

    impl HeapSize for DropObserver {
        fn heap_size(&self) -> usize {
            0
        }
    }

    impl Drop for DropObserver {
        fn drop(&mut self) {
            *self.drop_counter.lock().unwrap() += 1;
        }
    }

    fn setup_cache_with_drop_counter() -> (LruCache<i32, DropObserver>, Arc<Mutex<u32>>) {
        let drop_counter = Arc::new(Mutex::new(0));
        let mut cache = LruCache::new(1024);
        cache.insert(0, DropObserver { drop_counter: Arc::clone(&drop_counter) })
            .unwrap();
        cache.insert(1, DropObserver { drop_counter: Arc::clone(&drop_counter) })
            .unwrap();

        (cache, drop_counter)
    }

    #[test]
    fn into_iter_drops_remaining_elements_if_dropped() {
        let (cache, drop_counter) = setup_cache_with_drop_counter();
        let mut into_iter = cache.into_iter();
        into_iter.next();

        assert_eq!(1, *drop_counter.lock().unwrap());

        drop(into_iter);

        assert_eq!(2, *drop_counter.lock().unwrap());
    }

    #[test]
    fn into_keys_drops_remaining_elements_if_dropped() {
        let (cache, drop_counter) = setup_cache_with_drop_counter();
        let mut into_keys = cache.into_keys();
        into_keys.next();

        assert_eq!(1, *drop_counter.lock().unwrap());

        drop(into_keys);

        assert_eq!(2, *drop_counter.lock().unwrap());
    }

    #[test]
    fn into_values_drops_remaining_elements_if_dropped() {
        let (cache, drop_counter) = setup_cache_with_drop_counter();
        let mut into_values = cache.into_values();
        into_values.next();

        assert_eq!(1, *drop_counter.lock().unwrap());

        drop(into_values);

        assert_eq!(2, *drop_counter.lock().unwrap());
    }

    #[test]
    fn drain_drops_remaining_elements_if_dropped() {
        let (mut cache, drop_counter) = setup_cache_with_drop_counter();
        let mut drain = cache.drain();
        drain.next();

        assert_eq!(1, *drop_counter.lock().unwrap());

        drop(drain);

        assert_eq!(2, *drop_counter.lock().unwrap());
        assert!(cache.is_empty());
    }

    fn assert_is_empty<T, I>(mut iterator: I)
    where
        I: Iterator<Item = T> + DoubleEndedIterator
    {
        assert!(iterator.next().is_none());
        assert!(iterator.next_back().is_none());
    }

    #[test]
    fn empty_cache_builds_empty_iterators() {
        let mut cache: LruCache<&str, &str> = LruCache::new(1024);

        assert_is_empty(cache.iter());
        assert_is_empty(cache.keys());
        assert_is_empty(cache.values());
        assert_is_empty(cache.drain());
        assert_is_empty(cache.clone().into_iter());
        assert_is_empty(cache.clone().into_keys());
        assert_is_empty(cache.into_values());
    }
}