pub struct MemoryCache { /* private fields */ }
High-performance in-memory cache implementation.
MemoryCache provides fast, local caching with automatic expiration support. It’s ideal for single-instance applications or when you need very low-latency cache access. The cache is thread-safe and supports concurrent reads and writes.
§Features
- Thread-safe: Safe for concurrent access from multiple threads
- TTL Support: Automatic expiration of cache entries
- Memory efficient: Expired entries are cleaned up automatically
- Fast access: O(1) average case for get/set operations
- Flexible TTL: Per-entry TTL or default TTL for all entries
§Examples
§Basic Usage
use torch_web::cache::MemoryCache;
use std::time::Duration;
let cache = MemoryCache::new(Some(Duration::from_secs(300))); // 5 minute default TTL
// Set a value with default TTL
cache.set("user:123", "John Doe", None).await;
// Set a value with custom TTL
cache.set("session:abc", "active", Some(Duration::from_secs(3600))).await;
// Get a value
if let Some(name) = cache.get("user:123").await {
    println!("User name: {}", name);
}
§With JSON Serialization
use torch_web::cache::MemoryCache;
use std::time::Duration;
use serde::{Serialize, Deserialize};
#[derive(Serialize, Deserialize)]
struct User {
    id: u32,
    name: String,
    email: String,
}
let cache = MemoryCache::new(None); // No default TTL
let user = User {
    id: 123,
    name: "John Doe".to_string(),
    email: "john@example.com".to_string(),
};
// Serialize and cache
let user_json = serde_json::to_string(&user)?;
cache.set("user:123", &user_json, Some(Duration::from_secs(3600))).await;
// Retrieve and deserialize
if let Some(cached_json) = cache.get("user:123").await {
    let cached_user: User = serde_json::from_str(&cached_json)?;
    println!("Cached user: {}", cached_user.name);
}
§Cache Invalidation
use torch_web::cache::MemoryCache;
let cache = MemoryCache::new(None);
// Set some values
cache.set("key1", "value1", None).await;
cache.set("key2", "value2", None).await;
// Remove a specific key
cache.delete("key1").await;
// Clear all entries
cache.clear().await;
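§Concurrent Access
MemoryCache is Send + Sync, so a single instance can be shared across tasks. A minimal sketch, assuming the cache is shared behind an Arc and a Tokio runtime drives the spawned tasks (the crate does not mandate either choice):
use torch_web::cache::MemoryCache;
use std::sync::Arc;
use std::time::Duration;

let cache = Arc::new(MemoryCache::new(Some(Duration::from_secs(60))));

let mut handles = Vec::new();
for i in 0..4 {
    let cache = Arc::clone(&cache);
    // Each task writes and reads its own key; concurrent access is safe.
    handles.push(tokio::spawn(async move {
        let key = format!("worker:{}", i);
        let _ = cache.set(&key, "running", None).await;
        cache.get(&key).await
    }));
}

for handle in handles {
    assert_eq!(handle.await.unwrap().as_deref(), Some("running"));
}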
Implementations§
impl MemoryCache
pub fn new(default_ttl: Option<Duration>) -> Self
pub async fn get(&self, key: &str) -> Option<String>
pub async fn set(&self, key: &str, value: &str, ttl: Option<Duration>) -> Result<(), Box<dyn Error>>
pub async fn delete(&self, key: &str) -> Result<bool, Box<dyn Error>>
pub async fn clear(&self) -> Result<(), Box<dyn Error>>
pub async fn cleanup_expired(&self) -> Result<usize, Box<dyn Error>>
pub async fn size(&self) -> usize
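Expired entries are dropped automatically, but cleanup_expired and size allow explicit maintenance and introspection. A minimal sketch, assuming the returned usize is the number of entries removed and that a Tokio timer is available for the delay:
use torch_web::cache::MemoryCache;
use std::time::Duration;

let cache = MemoryCache::new(Some(Duration::from_millis(50)));
cache.set("short-lived", "value", None).await?;

// Sleep past the default TTL so the entry expires
// (tokio is an assumption; any async timer works).
tokio::time::sleep(Duration::from_millis(100)).await;

// Trigger an explicit sweep; interpreting the count as "entries removed"
// is an assumption about cleanup_expired's return value.
let removed = cache.cleanup_expired().await?;
println!("Removed {} expired entries, {} remain", removed, cache.size().await);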
Trait Implementations§
impl Cache for MemoryCache
fn get(&self, key: &str) -> Pin<Box<dyn Future<Output = Option<String>> + Send + '_>>
fn set(&self, key: &str, value: &str, ttl: Option<Duration>) -> Pin<Box<dyn Future<Output = Result<(), Box<dyn Error>>> + Send + '_>>
fn delete(&self, key: &str) -> Pin<Box<dyn Future<Output = Result<bool, Box<dyn Error>>> + Send + '_>>
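Because these methods return boxed Send futures, MemoryCache can be driven through a Cache trait object. A minimal sketch, assuming the Cache trait is exported from torch_web::cache alongside MemoryCache; the remember helper is illustrative, not part of the crate:
use torch_web::cache::{Cache, MemoryCache};

// Hypothetical helper that accepts any Cache implementation.
async fn remember(cache: &dyn Cache, key: &str, value: &str) -> Option<String> {
    let _ = cache.set(key, value, None).await;
    cache.get(key).await
}

let cache = MemoryCache::new(None);
let value = remember(&cache, "greeting", "hello").await;
assert_eq!(value.as_deref(), Some("hello"));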
Auto Trait Implementations§
impl Freeze for MemoryCache
impl !RefUnwindSafe for MemoryCache
impl Send for MemoryCache
impl Sync for MemoryCache
impl Unpin for MemoryCache
impl !UnwindSafe for MemoryCache
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.