json_eval_rs/
parsed_schema_cache.rs

//! Built-in cache store for `Arc<ParsedSchema>` instances.
//!
//! Provides thread-safe caching of parsed schemas with caller-controlled lifecycle:
//! - Caller provides unique keys
//! - Caller decides when to re-parse
//! - Caller controls cache clearing
//! - Caller manages memory release

use crate::ParsedSchema;
use indexmap::IndexMap;
use std::sync::{Arc, RwLock};

/// Thread-safe cache for storing and reusing ParsedSchema instances
///
/// # Example
/// ```
/// use json_eval_rs::{ParsedSchemaCache, ParsedSchema};
/// use std::sync::Arc;
///
/// let cache = ParsedSchemaCache::new();
///
/// // Parse and cache a schema
/// let schema_json = r#"{"type": "object"}"#;
/// let schema = ParsedSchema::parse(schema_json).unwrap();
/// cache.insert("my-schema".to_string(), Arc::new(schema));
///
/// // Retrieve from cache
/// if let Some(cached) = cache.get("my-schema") {
///     // Use cached schema for evaluation
/// }
///
/// // Remove specific entry
/// cache.remove("my-schema");
///
/// // Clear all entries
/// cache.clear();
/// ```
#[derive(Clone)]
pub struct ParsedSchemaCache {
    cache: Arc<RwLock<IndexMap<String, Arc<ParsedSchema>>>>,
}

impl ParsedSchemaCache {
    /// Create a new empty cache
    pub fn new() -> Self {
        Self {
            cache: Arc::new(RwLock::new(IndexMap::new())),
        }
    }

    /// Insert or update a parsed schema with the given key
    ///
    /// Returns the previous value if the key already existed
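    ///
    /// # Example
    ///
    /// A small sketch, mirroring the struct-level example above:
    /// ```
    /// use json_eval_rs::{ParsedSchemaCache, ParsedSchema};
    /// use std::sync::Arc;
    ///
    /// let cache = ParsedSchemaCache::new();
    /// let schema = Arc::new(ParsedSchema::parse(r#"{"type": "object"}"#).unwrap());
    ///
    /// // First insert: no previous value for this key.
    /// assert!(cache.insert("k".to_string(), schema.clone()).is_none());
    /// // Re-inserting the same key returns the previously cached schema.
    /// assert!(cache.insert("k".to_string(), schema).is_some());
    /// ```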
    pub fn insert(&self, key: String, schema: Arc<ParsedSchema>) -> Option<Arc<ParsedSchema>> {
        let mut cache = self.cache.write().unwrap();
        cache.insert(key, schema)
    }

    /// Get a cloned Arc reference to the cached schema
    ///
    /// Returns None if the key doesn't exist
    pub fn get(&self, key: &str) -> Option<Arc<ParsedSchema>> {
        let cache = self.cache.read().unwrap();
        cache.get(key).cloned()
    }

    /// Remove and return the schema for the given key
    ///
    /// Returns None if the key doesn't exist
    pub fn remove(&self, key: &str) -> Option<Arc<ParsedSchema>> {
        let mut cache = self.cache.write().unwrap();
        cache.shift_remove(key)
    }

    /// Clear all cached schemas
    pub fn clear(&self) {
        let mut cache = self.cache.write().unwrap();
        cache.clear();
    }

    /// Check if a key exists in the cache
    pub fn contains_key(&self, key: &str) -> bool {
        let cache = self.cache.read().unwrap();
        cache.contains_key(key)
    }

    /// Get the number of cached schemas
    pub fn len(&self) -> usize {
        let cache = self.cache.read().unwrap();
        cache.len()
    }

    /// Check if the cache is empty
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Get all keys currently in the cache
    pub fn keys(&self) -> Vec<String> {
        let cache = self.cache.read().unwrap();
        cache.keys().cloned().collect()
    }

    /// Get cache statistics
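    ///
    /// # Example
    ///
    /// A minimal sketch on an empty cache:
    /// ```
    /// use json_eval_rs::ParsedSchemaCache;
    ///
    /// let cache = ParsedSchemaCache::new();
    /// let stats = cache.stats();
    /// assert_eq!(stats.entry_count, 0);
    /// println!("{}", stats); // "ParsedSchemaCache: 0 entries"
    /// ```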
    pub fn stats(&self) -> ParsedSchemaCacheStats {
        let cache = self.cache.read().unwrap();
        ParsedSchemaCacheStats {
            entry_count: cache.len(),
            keys: cache.keys().cloned().collect(),
        }
    }

    /// Get or insert a schema using a factory function
    ///
    /// If the key exists, returns the cached value.
    /// Otherwise, calls the factory function to create a new value,
    /// inserts it, and returns it.
    ///
    /// # Example
    /// ```
    /// use json_eval_rs::{ParsedSchemaCache, ParsedSchema};
    /// use std::sync::Arc;
    ///
    /// let cache = ParsedSchemaCache::new();
    /// let json = r#"{"type": "object"}"#;
    /// let schema = cache.get_or_insert_with("my-schema", || {
    ///     Arc::new(ParsedSchema::parse(json).unwrap())
    /// });
    /// ```
    pub fn get_or_insert_with<F>(&self, key: &str, factory: F) -> Arc<ParsedSchema>
    where
        F: FnOnce() -> Arc<ParsedSchema>,
    {
        // Try read first (fast path)
        {
            let cache = self.cache.read().unwrap();
            if let Some(schema) = cache.get(key) {
                return schema.clone();
            }
        }

        // Need to insert (slow path)
        let mut cache = self.cache.write().unwrap();
        // Double-check in case another thread inserted while we waited for write lock
        if let Some(schema) = cache.get(key) {
            return schema.clone();
        }

        let schema = factory();
        cache.insert(key.to_string(), schema.clone());
        schema
    }

    /// Batch insert multiple schemas at once
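    ///
    /// # Example
    ///
    /// A small sketch, reusing the schema JSON from the struct-level example:
    /// ```
    /// use json_eval_rs::{ParsedSchemaCache, ParsedSchema};
    /// use std::sync::Arc;
    ///
    /// let cache = ParsedSchemaCache::new();
    /// let schema = Arc::new(ParsedSchema::parse(r#"{"type": "object"}"#).unwrap());
    /// cache.insert_batch(vec![
    ///     ("a".to_string(), schema.clone()),
    ///     ("b".to_string(), schema),
    /// ]);
    /// assert_eq!(cache.len(), 2);
    /// ```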
    pub fn insert_batch(&self, entries: Vec<(String, Arc<ParsedSchema>)>) {
        let mut cache = self.cache.write().unwrap();
        for (key, schema) in entries {
            cache.insert(key, schema);
        }
    }

    /// Remove multiple keys at once
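    ///
    /// Returns the removed `(key, schema)` pairs; keys that were not present are skipped.
    ///
    /// # Example
    ///
    /// A small sketch; only the key that actually exists is removed:
    /// ```
    /// use json_eval_rs::{ParsedSchemaCache, ParsedSchema};
    /// use std::sync::Arc;
    ///
    /// let cache = ParsedSchemaCache::new();
    /// let schema = Arc::new(ParsedSchema::parse(r#"{"type": "object"}"#).unwrap());
    /// cache.insert("a".to_string(), schema);
    ///
    /// let removed = cache.remove_batch(&["a".to_string(), "missing".to_string()]);
    /// assert_eq!(removed.len(), 1);
    /// assert!(cache.is_empty());
    /// ```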
    pub fn remove_batch(&self, keys: &[String]) -> Vec<(String, Arc<ParsedSchema>)> {
        let mut cache = self.cache.write().unwrap();
        let mut removed = Vec::new();
        for key in keys {
            if let Some(schema) = cache.shift_remove(key) {
                removed.push((key.clone(), schema));
            }
        }
        removed
    }
}

impl Default for ParsedSchemaCache {
    fn default() -> Self {
        Self::new()
    }
}

/// Statistics about the cache state
#[derive(Debug, Clone)]
pub struct ParsedSchemaCacheStats {
    /// Number of entries in the cache
    pub entry_count: usize,
    /// List of all keys
    pub keys: Vec<String>,
}

impl std::fmt::Display for ParsedSchemaCacheStats {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParsedSchemaCache: {} entries", self.entry_count)?;
        if !self.keys.is_empty() {
            write!(f, " (keys: {})", self.keys.join(", "))?;
        }
        Ok(())
    }
}

// Optional: Global cache instance for convenience
use once_cell::sync::Lazy;

/// Global ParsedSchema cache instance
///
/// Convenient for applications that want a single global cache
/// without managing their own instance.
///
/// # Example
/// ```
/// use json_eval_rs::{PARSED_SCHEMA_CACHE, ParsedSchema};
/// use std::sync::Arc;
///
/// let schema = Arc::new(ParsedSchema::parse(r#"{"type": "object"}"#).unwrap());
/// PARSED_SCHEMA_CACHE.insert("global-schema".to_string(), schema);
/// let cached = PARSED_SCHEMA_CACHE.get("global-schema");
/// ```
pub static PARSED_SCHEMA_CACHE: Lazy<ParsedSchemaCache> = Lazy::new(ParsedSchemaCache::new);

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_cache_basic_operations() {
        let cache = ParsedSchemaCache::new();
        assert_eq!(cache.len(), 0);
        assert!(cache.is_empty());

        // This test only exercises the empty-cache API, so it doesn't need a real ParsedSchema.
        // In real usage, you'd parse one with ParsedSchema::parse, as in the doc examples above.

        assert!(!cache.contains_key("test"));
        assert_eq!(cache.keys().len(), 0);
    }

    #[test]
    fn test_cache_clone() {
        let cache1 = ParsedSchemaCache::new();
        let cache2 = cache1.clone();

        // Both should share the same underlying cache
        assert_eq!(cache1.len(), cache2.len());
    }
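
    // A sketch test exercising stats() and the batch helpers on an empty
    // cache, so it doesn't need to construct a real ParsedSchema.
    #[test]
    fn test_stats_and_batch_on_empty_cache() {
        let cache = ParsedSchemaCache::new();

        let stats = cache.stats();
        assert_eq!(stats.entry_count, 0);
        assert!(stats.keys.is_empty());
        assert_eq!(stats.to_string(), "ParsedSchemaCache: 0 entries");

        // Removing keys that were never inserted yields nothing.
        let removed = cache.remove_batch(&["missing".to_string()]);
        assert!(removed.is_empty());
        assert!(cache.is_empty());
    }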
}