aba_cache/lib.rs

//! In-Process In-Memory Cache implementation.
//!
//! Supported eviction strategies:
//! - LRU (Least Recently Used, inspired by [LRU Cache](https://github.com/jeromefroe/lru-rs)); see the sketch below
//! - LFU (Least Frequently Used, TBD)
//! - MRU (Most Recently Used, TBD)
//! - FIFO (First In First Out, TBD)
//!
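//! As a minimal sketch of the LRU idea itself (plain `std`, not this crate's
//! internals): every access moves an entry to the most-recently-used position,
//! and once capacity is exceeded the entry at the least-recently-used end is
//! dropped.
//!
//! ```rust
//! use std::collections::VecDeque;
//!
//! // Toy LRU order: front = most recently used, back = least recently used.
//! let capacity = 2;
//! let mut order: VecDeque<(&str, i32)> = VecDeque::new();
//!
//! for (key, value) in [("one", 1), ("two", 2), ("one", 10), ("three", 3)].iter().copied() {
//!     // On every access/insert, move the key to the front.
//!     if let Some(pos) = order.iter().position(|(k, _)| *k == key) {
//!         order.remove(pos);
//!     }
//!     order.push_front((key, value));
//!     // Evict from the least-recently-used end once over capacity.
//!     if order.len() > capacity {
//!         order.pop_back();
//!     }
//! }
//!
//! // "two" was the least recently used entry, so it was evicted.
//! assert!(order.iter().all(|(k, _)| *k != "two"));
//! assert_eq!(order.len(), 2);
//! ```
//!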
//! Supports async/.await, powered by the tokio runtime.
//!
//! ## Caveat
//!
//! If you only need the non-asynchronous cache, disable this crate's default features in your `Cargo.toml`.
//!
//! If you need a reference to your value, use `std::rc::Rc` with the non-asynchronous cache
//! and `std::sync::Arc` with the asynchronous one.
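//!
//! A minimal illustration (plain Rust plus tokio, not this crate's API) of why the
//! asynchronous examples use `Arc`: futures handed to `tokio::spawn` must be `Send`,
//! and `Arc<T>` is `Send` (for `Send + Sync` `T`) while `Rc<T>` is not.
//!
//! ```rust,no_run
//! use std::sync::Arc;
//!
//! #[tokio::main]
//! async fn main() {
//!     let value = Arc::new(String::from("cached payload"));
//!     let handle = value.clone();
//!
//!     // This future is moved to another task, so everything it captures
//!     // must be `Send`; an `Rc` here would fail to compile.
//!     tokio::spawn(async move {
//!         println!("worker sees: {}", handle);
//!     })
//!     .await
//!     .unwrap();
//!
//!     println!("main still owns: {}", value);
//! }
//! ```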
//!
//! ## Basic example
//!
//! ### Cargo.toml
//!
//! ```toml
//! [dependencies]
//! aba-cache = { version = "0.1", default-features = false }
//! serde_json = { version = "1.0" }
//! ```
//!
//! ### main.rs
//!
//! ```rust,no_run
//! use aba_cache as cache;
//! use cache::LruCache;
//! use serde_json::{self, Value};
//! use std::rc::Rc;
//!
//! // create a cache with multiply_cap 1024, evicting
//! // LRU entries aged 15 minutes or older
//! let mut cache = LruCache::new(1024, 15 * 60);
//!
//! let val_one: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":1}"#).unwrap());
//! let val_two: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":2}"#).unwrap());
//!
//! cache.put("one", val_one.clone());
//! cache.put("two", val_two.clone());
//!
//! assert!(
//!     if let Some(value) = cache.get(&"one") {
//!         *value == val_one
//!     } else {
//!         false
//!     }
//! );
//! assert_eq!(cache.get(&"three"), None);
//! ```
//!
//! ## Async example
//!
//! ### Cargo.toml
//!
//! ```toml
//! [dependencies]
//! aba-cache = { version = "0.1" }
//! serde_json = { version = "1.0" }
//! tokio = { version = "0.2", features = ["macros", "rt-core"] }
//! ```
//!
//! ### main.rs
//!
//! ```rust,no_run
//! use aba_cache as cache;
//! use cache::LruAsyncCache;
//! use serde_json::{self, Value};
//! use std::sync::Arc;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // create a cache with multiply_cap 1024, evicting
//!     // LRU entries aged 15 minutes or older
//!     let cache = LruAsyncCache::new(1024, 15 * 60);
//!
//!     let val_one: Arc<Value> = Arc::new(serde_json::from_str(r#"{"id":1}"#)?);
//!     let val_two: Arc<Value> = Arc::new(serde_json::from_str(r#"{"id":2}"#)?);
//!
//!     cache.put("one", val_one.clone()).await;
//!     cache.put("two", val_two.clone()).await;
//!
//!     assert!(
//!         if let Some(value) = cache.get(&"one").await {
//!             value == val_one
//!         } else {
//!             false
//!         }
//!     );
//!     assert_eq!(cache.get(&"three").await, None);
//!
//!     Ok(())
//! }
//! ```
mod lru;

#[cfg(feature = "asynchronous")]
pub use lru::asynchronous::Cache as LruAsyncCache;
pub use lru::Cache as LruCache;