// fncache/lib.rs
1#![warn(missing_docs)]
2//! # fncache
3//!
4//! A zero-boilerplate Rust library for function-level caching with pluggable backends, inspired by `functools.lru_cache` and `request-cache`.
5//!
6//! ## Features
7//!
8//! - **Zero Boilerplate**: Simple `#[fncache]` attribute for instant caching
9//! - **Pluggable Backends**: Memory, File, Redis, RocksDB support
10//! - **Async/Sync**: Seamless support for both synchronous and asynchronous functions
11//! - **Type Safety**: Strong typing throughout the caching layer with compile-time guarantees
12//! - **Advanced Metrics**: Built-in instrumentation with latency, hit rates, and size tracking
13//! - **Cache Invalidation**: Tag-based and prefix-based cache invalidation
14//! - **Background Warming**: Proactive cache population for improved performance
15//!
16//! ## Quick Start
17//!
18//! ```ignore
19//! // Example usage (not actually run in tests due to proc-macro limitations)
20//! use fncache::{fncache, init_global_cache, MemoryBackend};
21//!
22//! // Initialize the global cache with an in-memory backend
23//! init_global_cache(MemoryBackend::new()).unwrap();
24//!
//! #[fncache(ttl = 60)] // Cache for 60 seconds
//! fn expensive_operation(x: u64) -> u64 {
//!     println!("Performing expensive operation for {}", x);
//!     x * x
//! }
//!
//! fn main() {
//!     // First call executes the function
//!     let result1 = expensive_operation(5);
//!     println!("Result 1: {}", result1); // Takes time
//!
//!     // Second call returns cached result
//!     let result2 = expensive_operation(5);
//!     println!("Result 2: {}", result2); // Returns immediately
//! }
40//! ```
41
42use backends::CacheBackend;
43use std::sync::{Mutex, OnceLock};
44
45pub mod backends;
46pub mod error;
47pub mod eviction;
48pub mod invalidation;
49pub mod key_derivation;
50pub mod metrics;
51pub mod serialization;
52pub mod warming;
53
54#[cfg(test)]
55mod invalidation_tests;
56
57#[cfg(test)]
58mod eviction_tests;
59
60#[cfg(test)]
61mod key_derivation_tests;
62
63#[cfg(test)]
64mod metrics_tests;
65
66pub use error::Error as FncacheError;
67
68#[cfg(feature = "wasm")]
69pub use backends::wasm::WasmStorageBackend;
70
/// Internal wrapper around the boxed cache backend stored in the global
/// `OnceLock`.
///
/// The type is `pub` — presumably so code generated by the `#[fncache]`
/// macro and the `global_cache()` accessor can name it — but it is not
/// meant to be constructed directly by users; use [`init_global_cache`].
///
/// NOTE(review): `#[derive(Debug)]` here requires `CacheBackend: Debug`
/// as a supertrait bound — confirm against `backends::CacheBackend`.
#[derive(Debug)]
pub struct GlobalCache(Box<dyn CacheBackend + Send + Sync>);
74
75#[cfg(not(any(debug_assertions, feature = "test-utils")))]
76static GLOBAL_CACHE: OnceLock<Mutex<GlobalCache>> = OnceLock::new();
77
78#[cfg(any(debug_assertions, feature = "test-utils"))]
79static GLOBAL_CACHE: OnceLock<Mutex<GlobalCache>> = OnceLock::new();
80
81pub use backends::memory::MemoryBackend;
82
83/// Re-export of the proc macro for convenience.
84///
85/// This allows users to write `use fncache::fncache;`
86/// instead of `use fncache_macros::fncache;`.
87///
88/// # Examples
89///
90/// ```ignore
91/// // This example illustrates how to use fncache, but is not actually run in tests
92/// // Import the necessary items
93/// use fncache::{fncache, init_global_cache, MemoryBackend};
94/// use fncache::FncacheError;
95///
96/// // Initialize the cache backend
97/// init_global_cache(MemoryBackend::new()).unwrap();
98///
99/// // Cache the function result for 5 seconds
/// #[fncache(ttl = 5)]
/// fn add(a: i32, b: i32) -> i32 {
///     a + b
/// }
///
/// // For async functions
/// #[fncache(ttl = 10)]
/// async fn fetch_data(id: &str) -> std::result::Result<String, FncacheError> {
///     // Fetch data from some source
///     Ok(format!("data for {}", id))
/// }
111/// ```
112#[doc(inline)]
113pub use fncache_macros::fncache;
114
/// The main cache result type: `std::result::Result` specialized to this
/// crate's [`error::Error`].
pub type Result<T> = std::result::Result<T, error::Error>;
117
118/// Initialize the global cache with the specified backend.
119///
120/// # Examples
121///
122/// ```no_run
123/// use fncache::{init_global_cache, MemoryBackend};
124///
125/// // Initialize with the in-memory backend
126/// init_global_cache(MemoryBackend::new()).unwrap();
127/// ```
128#[cfg(not(any(debug_assertions, feature = "test-utils")))]
129pub fn init_global_cache<B>(backend: B) -> Result<()>
130where
131 B: CacheBackend + Send + Sync + 'static,
132{
133 let global_cache = GlobalCache(Box::new(backend));
134 GLOBAL_CACHE
135 .set(Mutex::new(global_cache))
136 .map_err(|_| error::Error::AlreadyInitialized)?;
137 Ok(())
138}
139
140/// Initialize the global cache with the specified backend (test version).
141#[cfg(any(debug_assertions, feature = "test-utils"))]
142pub fn init_global_cache<B>(backend: B) -> Result<()>
143where
144 B: CacheBackend + Send + Sync + 'static,
145{
146 let global_cache = GlobalCache(Box::new(backend));
147 GLOBAL_CACHE
148 .set(Mutex::new(global_cache))
149 .map_err(|_| error::Error::AlreadyInitialized)?;
150 Ok(())
151}
152
153/// Get a reference to the global cache.
154///
155/// # Panics
156///
157/// Panics if the global cache has not been initialized.
158#[cfg(not(any(debug_assertions, feature = "test-utils")))]
159pub fn global_cache() -> &'static Mutex<GlobalCache> {
160 GLOBAL_CACHE
161 .get()
162 .expect("Global cache not initialized. Call init_global_cache first.")
163}
164
165/// Get a reference to the global cache (test version).
166///
167/// # Panics
168///
169/// Panics if the global cache has not been initialized.
170#[cfg(any(debug_assertions, feature = "test-utils"))]
171pub fn global_cache() -> &'static Mutex<GlobalCache> {
172 GLOBAL_CACHE
173 .get()
174 .expect("Global cache not initialized. Call init_global_cache first.")
175}
176
/// Reset the global cache for testing purposes.
///
/// This should only be used in tests and never in production code.
///
/// Note: `OnceLock` provides no safe way to clear an initialized value,
/// so this function is intentionally a no-op kept only for API
/// compatibility. Tests should use unique function names or separate
/// test processes to avoid cache-key conflicts.
/// Available in debug builds and when the "test-utils" feature is enabled.
#[cfg(any(debug_assertions, feature = "test-utils"))]
pub fn reset_global_cache_for_testing() {
    // OnceLock cannot be safely reset once initialized, so there is
    // deliberately nothing to do here; callers rely on this being a no-op.
}
190
// `GlobalCache` implements the backend trait by delegating every call to
// the boxed backend it wraps (field 0), so the global handle can be used
// anywhere a `CacheBackend` is expected.
//
// NOTE(review): the `&String` parameters are dictated by the
// `CacheBackend` trait signature; `&str` would be more idiomatic but
// changing it here would require changing the trait itself.
#[async_trait::async_trait]
impl CacheBackend for GlobalCache {
    /// Fetch the raw bytes cached under `key`, if any.
    async fn get(&self, key: &String) -> Result<Option<Vec<u8>>> {
        self.0.get(key).await
    }

    /// Store `value` under `key`, optionally expiring after `ttl`.
    async fn set(
        &self,
        key: String,
        value: Vec<u8>,
        ttl: Option<std::time::Duration>,
    ) -> Result<()> {
        self.0.set(key, value, ttl).await
    }

    /// Remove the entry for `key` from the underlying backend.
    async fn remove(&self, key: &String) -> Result<()> {
        self.0.remove(key).await
    }

    /// Report whether `key` currently has a cached entry.
    async fn contains_key(&self, key: &String) -> Result<bool> {
        self.0.contains_key(key).await
    }

    /// Drop every entry held by the underlying backend.
    async fn clear(&self) -> Result<()> {
        self.0.clear().await
    }
}
218
/// Common prelude for using the library.
///
/// `use fncache::prelude::*;` brings the `#[fncache]` attribute macro,
/// the global-cache helpers, the backend traits, metrics, and the
/// error/result types into scope in one line.
pub mod prelude {
    pub use crate::{
        backends::{Backend, CacheBackend},
        error::Error,
        fncache, global_cache, init_global_cache,
        metrics::Metrics,
        Result,
    };
}
229
#[cfg(test)]
mod tests {
    use super::*;
    use serial_test::serial;

    /// Initializing and then fetching the global cache must succeed.
    #[test]
    #[serial]
    fn test_global_cache_initialization() {
        // Another #[serial] test (possibly in a sibling test module) may
        // have initialized the global cache first, and the `OnceLock`
        // cannot be reset. `.unwrap()` here was therefore order-dependent
        // and flaky; tolerate `AlreadyInitialized` instead.
        let _ = init_global_cache(MemoryBackend::new());
        // Regardless of who initialized it, the accessor must not panic.
        let _cache = global_cache();
    }

    /// An uninitialized cache handle must panic with the expected message.
    #[test]
    #[should_panic(expected = "Global cache not initialized")]
    #[serial]
    fn test_global_cache_uninitialized() {
        // Use a private OnceLock rather than GLOBAL_CACHE so this test
        // neither depends on nor disturbs the process-wide init state.
        static TEST_CACHE: OnceLock<Mutex<GlobalCache>> = OnceLock::new();
        let _ = TEST_CACHE.get().expect("Global cache not initialized");
    }
}
249}