1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
use tokio::task::JoinHandle;
use std::hash::Hash;
use futures::Future;
use thiserror::Error;
use crate::internal_cache::{CacheAction, InternalCacheStore, CacheMessage};
use crate::backing::{CacheBacking, HashMapBacking};
use std::fmt::Debug;

/// Top-level error type returned by every `LoadingCache` operation.
///
/// `E` is the user-defined error type produced by the loader function; it is
/// kept separate from internal plumbing failures (`CacheCommunicationError`).
#[derive(Error, Debug)]
pub enum CacheLoadingError<E: Debug> {
    /// A failure of the internal channel/task machinery, not of the loader itself.
    #[error(transparent)]
    CommunicationError(CacheCommunicationError),
    /// The cache answered, but no value was produced for the request.
    #[error("No data found")]
    NoData(),
    // todo better handling here? eventually return loadingerror if possible
    /// The loader function itself failed; the original loader error is wrapped.
    #[error("An error occurred when loading the entity from the loader function")]
    LoadingError(E),
}

/// Failures of the internal plumbing between a `LoadingCache` handle and its
/// worker task. These never originate from the user-supplied loader function.
#[derive(Error, Debug)]
pub enum CacheCommunicationError {
    /// The mpsc send to the cache worker task failed (the worker is gone).
    #[error("An error occurred when trying to submit the cache request")]
    TokioMpscSendError(),
    /// Awaiting the spawned loader task's `JoinHandle` failed (panic or cancellation).
    #[error("An error occurred when trying to join the result future")]
    FutureJoinError(#[from] tokio::task::JoinError),
    /// Receiving on the broadcast channel that fans out an in-flight load failed.
    #[error("An error occurred when waiting for the broadcaster response")]
    TokioBroadcastRecvError(#[from] tokio::sync::broadcast::error::RecvError),
    /// The oneshot response channel was dropped before an answer was sent.
    #[error("An error occurred when receiving the response")]
    TokioOneshotRecvError(#[from] tokio::sync::oneshot::error::RecvError),
    /// Internal sanity check tripped: lookups looped where they never should.
    #[error("Lookups are looping, internal error")]
    LookupLoop(),
}

// Convenience accessors for narrowing a `CacheLoadingError` down to one of its
// two broad categories: loader-produced errors and internal communication errors.
impl<E: Debug> CacheLoadingError<E> {
    /// Borrows the loader error `E` if this is a `LoadingError`, `None` otherwise.
    pub fn as_loading_error(&self) -> Option<&E> {
        if let CacheLoadingError::LoadingError(inner) = self {
            Some(inner)
        } else {
            None
        }
    }

    /// Consumes `self` and yields the loader error `E` if this is a `LoadingError`.
    pub fn into_loading_error(self) -> Option<E> {
        if let CacheLoadingError::LoadingError(inner) = self {
            Some(inner)
        } else {
            None
        }
    }

    /// Borrows the inner `CacheCommunicationError` if this is a `CommunicationError`.
    pub fn as_communication_error(&self) -> Option<&CacheCommunicationError> {
        if let CacheLoadingError::CommunicationError(inner) = self {
            Some(inner)
        } else {
            None
        }
    }

    /// Consumes `self` and yields the inner `CacheCommunicationError`, if any.
    pub fn into_communication_error(self) -> Option<CacheCommunicationError> {
        if let CacheLoadingError::CommunicationError(inner) = self {
            Some(inner)
        } else {
            None
        }
    }
}

/// A cache result together with provenance information.
#[derive(Clone)]
pub struct ResultMeta<V> {
    /// The resolved value.
    pub result: V,
    /// `true` if the value was already present in the cache,
    /// `false` if it was freshly produced by the loader for this request.
    pub cached: bool,
}

/// Internal state of a single cache slot, as stored in the `CacheBacking`.
#[derive(Debug, Clone)]
pub enum CacheEntry<V, E: Debug> {
    /// A fully loaded value.
    Loaded(V),
    /// A load is in flight; the broadcast sender fans the eventual
    /// `Result<V, E>` out to every waiter that subscribes meanwhile.
    Loading(tokio::sync::broadcast::Sender<Result<V, E>>),
}

/// Response sent back from the cache worker task for a single `CacheAction`.
#[derive(Debug)]
pub enum CacheResult<V, E: Debug> {
    /// The value was already present in the cache.
    Found(V),
    /// The value is being loaded; await this handle for the outcome.
    Loading(JoinHandle<Result<V, CacheLoadingError<E>>>),
    /// No value is available for this action.
    None,
}

/// Cheaply cloneable handle to an asynchronous loading cache.
///
/// All operations are forwarded over an mpsc channel to a background worker
/// task (spawned in `with_backing`) which owns the actual cache state.
#[derive(Debug, Clone)]
pub struct LoadingCache<K, V, E: Debug> {
    // Sender half of the channel to the cache worker task.
    tx: tokio::sync::mpsc::Sender<CacheMessage<K, V, E>>
}

impl<
    K: Eq + Hash + Clone + Send + 'static,
    V: Clone + Sized + Send + 'static,
    E: Clone + Sized + Send + Debug + 'static,
> LoadingCache<K, V, E> {
    /// Creates a new instance of a LoadingCache with the default `HashMapBacking`
    ///
    /// # Arguments
    ///
    /// * `loader` - A function which, for a key `K`, returns a
    ///              `Future<Output = Result<V, E>>` producing the value or a loader error
    ///
    /// # Return Value
    ///
    /// The `LoadingCache` instance. The internal worker task which operates the
    /// cache is spawned in the background by `with_backing`; its handle is not
    /// exposed.
    ///
    /// # Examples
    ///
    /// ```
    /// use cache_loader_async::cache_api::LoadingCache;
    /// use std::collections::HashMap;
    /// async fn example() {
    ///     let static_db: HashMap<String, u32> =
    ///         vec![("foo".into(), 32), ("bar".into(), 64)]
    ///             .into_iter()
    ///             .collect();
    ///
    ///     let cache = LoadingCache::new(move |key: String| {
    ///         let db_clone = static_db.clone();
    ///         async move {
    ///             db_clone.get(&key).cloned().ok_or(1)
    ///         }
    ///     });
    ///
    ///     let result = cache.get("foo".to_owned()).await.unwrap();
    ///
    ///     assert_eq!(result, 32);
    /// }
    /// ```
    pub fn new<T, F>(loader: T) -> LoadingCache<K, V, E>
        where F: Future<Output=Result<V, E>> + Sized + Send + 'static,
              T: Fn(K) -> F + Send + 'static {
        LoadingCache::with_backing(HashMapBacking::new(), loader)
    }

    /// Creates a new instance of a LoadingCache with a custom `CacheBacking`
    ///
    /// # Arguments
    ///
    /// * `backing` - The custom backing which the cache should use
    /// * `loader` - A function which, for a key `K`, returns a
    ///              `Future<Output = Result<V, E>>` producing the value or a loader error
    ///
    /// # Return Value
    ///
    /// The `LoadingCache` instance. The internal worker task which operates the
    /// cache is spawned here; its `JoinHandle` is intentionally discarded (see the
    /// comment in the body).
    ///
    /// # Examples
    ///
    /// ```
    /// use cache_loader_async::cache_api::LoadingCache;
    /// use std::collections::HashMap;
    /// use cache_loader_async::backing::HashMapBacking;
    /// async fn example() {
    ///     let static_db: HashMap<String, u32> =
    ///         vec![("foo".into(), 32), ("bar".into(), 64)]
    ///             .into_iter()
    ///             .collect();
    ///
    ///     let cache = LoadingCache::with_backing(
    ///         HashMapBacking::new(), // this is the default implementation of `new`
    ///         move |key: String| {
    ///             let db_clone = static_db.clone();
    ///             async move {
    ///                 db_clone.get(&key).cloned().ok_or(1)
    ///             }
    ///         }
    ///     );
    ///
    ///     let result = cache.get("foo".to_owned()).await.unwrap();
    ///
    ///     assert_eq!(result, 32);
    /// }
    /// ```
    pub fn with_backing<T, F, B>(backing: B, loader: T) -> LoadingCache<K, V, E>
        where F: Future<Output=Result<V, E>> + Sized + Send + 'static,
              T: Fn(K) -> F + Send + 'static,
              B: CacheBacking<K, CacheEntry<V, E>> + Send + 'static {
        // Bounded channel between this handle (and its clones) and the worker task.
        let (tx, rx) = tokio::sync::mpsc::channel(128);
        // The worker also receives a clone of `tx` — presumably so it can send
        // follow-up messages to itself (e.g. when a load completes). TODO confirm
        // against InternalCacheStore.
        let store = InternalCacheStore::new(backing, tx.clone(), loader);
        // run() spawns the worker task; the returned JoinHandle is intentionally
        // discarded, so failures inside the worker are not observable by callers.
        store.run(rx);
        LoadingCache {
            tx
        }
    }

    /// Retrieves or loads the value for specified key from either cache or loader function
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be loaded
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type V
    /// Err - Error of type CacheLoadingError
    pub async fn get(&self, key: K) -> Result<V, CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::Get(key)).await
            // Invariant: a Get never yields CacheResult::None — the worker either
            // finds the value or starts a load, so Some(..) or Err is guaranteed.
            .map(|opt_result| opt_result.expect("Get should always return either V or CacheLoadingError"))
            .map(|meta| meta.result)
    }

    /// Retrieves or loads the value for specified key from either cache or loader function with
    /// meta information, i.e. if the key was loaded from cache or from the loader function
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be loaded
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type ResultMeta<V>
    /// Err - Error of type CacheLoadingError
    pub async fn get_with_meta(&self, key: K) -> Result<ResultMeta<V>, CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::Get(key)).await
            // Same invariant as `get`: a Get always produces a value or an error.
            .map(|opt_result| opt_result.expect("Get should always return either V or CacheLoadingError"))
    }

    /// Sets the value for specified key and bypasses eventual currently ongoing loads
    /// If a key has been set programmatically, eventual concurrent loads will not change
    /// the value of the key.
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be loaded
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Previous value of type V wrapped in an Option depending whether there was a previous
    ///      value
    /// Err - Error of type CacheLoadingError
    pub async fn set(&self, key: K, value: V) -> Result<Option<V>, CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::Set(key, value)).await
            .map(|opt_meta| opt_meta.map(|meta| meta.result))
    }

    /// Loads the value for the specified key from the cache and returns None if not present
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be loaded
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type Option<V>
    /// Err - Error of type CacheLoadingError
    pub async fn get_if_present(&self, key: K) -> Result<Option<V>, CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::GetIfPresent(key)).await
            .map(|opt_meta| opt_meta.map(|meta| meta.result))
    }

    /// Checks whether a specific value is mapped for the given key
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be checked
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - bool
    /// Err - Error of type CacheLoadingError
    pub async fn exists(&self, key: K) -> Result<bool, CacheLoadingError<E>> {
        self.get_if_present(key).await
            .map(|result| result.is_some())
    }

    /// Removes a specific key-value mapping from the cache and returns the previous result
    /// if there was any or None
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be evicted
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type Option<V>
    /// Err - Error of type CacheLoadingError
    pub async fn remove(&self, key: K) -> Result<Option<V>, CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::Remove(key)).await
            .map(|opt_meta| opt_meta.map(|meta| meta.result))
    }

    /// Removes all entries which match the specified predicate
    ///
    /// # Arguments
    ///
    /// * `predicate` - The predicate to test all entries against; the value is
    ///                 `None` for entries still in LOADING state
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Nothing, the removed values are discarded
    /// Err - Error of type CacheLoadingError -> the values were not discarded
    pub async fn remove_if<P: Fn((&K, Option<&V>)) -> bool + Send + Sync + 'static>(&self, predicate: P) -> Result<(), CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::RemoveIf(Box::new(predicate))).await
            .map(|_| ())
    }

    /// Removes all entries from the underlying backing
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Nothing, the removed values are discarded
    /// Err - Error of type CacheLoadingError -> the values were not discarded
    pub async fn clear(&self) -> Result<(), CacheLoadingError<E>> {
        self.send_cache_action(CacheAction::Clear()).await
            .map(|_| ())
    }

    /// Updates a key on the cache with the given update function and returns the updated value
    ///
    /// If the key is not present yet, it'll be loaded using the loader function and will be
    /// updated once this loader function completes.
    /// In case the key was manually updated via `set` during the loader function the update will
    /// take place on the manually updated value, so user-controlled input takes precedence over
    /// the loader function
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be updated
    /// * `update_fn` - A `FnOnce(V) -> V` which has the current value as parameter and should
    ///                 return the updated value
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type V — per the summary above, the updated value.
    ///      NOTE(review): this doc previously said "previously mapped value",
    ///      contradicting the summary; confirm against the store implementation.
    /// Err - Error of type CacheLoadingError
    pub async fn update<U>(&self, key: K, update_fn: U) -> Result<V, CacheLoadingError<E>>
        where U: FnOnce(V) -> V + Send + 'static {
        // The trailing `true` asks the worker to load-then-update missing keys.
        self.send_cache_action(CacheAction::Update(key, Box::new(update_fn), true)).await
            .map(|opt_result| opt_result.expect("Get should always return either V or CacheLoadingError"))
            .map(|meta| meta.result)
    }

    /// Updates a key on the cache with the given update function and returns the updated value if
    /// it existed
    ///
    /// If the key is not present yet, it'll be ignored. This also counts for keys in LOADING state.
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be updated
    /// * `update_fn` - A `FnOnce(V) -> V` which has the current value as parameter and should
    ///                 return the updated value
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Optional value of type V which is the previously mapped value
    /// Err - Error of type CacheLoadingError
    pub async fn update_if_exists<U>(&self, key: K, update_fn: U) -> Result<Option<V>, CacheLoadingError<E>>
        where U: FnOnce(V) -> V + Send + 'static {
        // The trailing `false` tells the worker NOT to trigger the loader for
        // missing keys — absent keys resolve to Ok(None).
        self.send_cache_action(CacheAction::Update(key, Box::new(update_fn), false)).await
            .map(|opt| opt.map(|meta| meta.result))
    }

    /// Updates a key on the cache with the given update function and returns the updated value
    ///
    /// If the key is not present yet, it'll be loaded using the loader function and will be
    /// updated once this loader function completes.
    /// In case the key was manually updated via `set` during the loader function the update will
    /// take place on the manually updated value, so user-controlled input takes precedence over
    /// the loader function
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be updated
    /// * `update_fn` - A `FnMut(&mut V) -> ()` which has the current value as parameter and should
    ///                 update it accordingly
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Value of type V
    /// Err - Error of type CacheLoadingError
    pub async fn update_mut<U>(&self, key: K, update_fn: U) -> Result<V, CacheLoadingError<E>>
        where U: FnMut(&mut V) -> () + Send + 'static {
        // The trailing `true` asks the worker to load-then-update missing keys.
        self.send_cache_action(CacheAction::UpdateMut(key, Box::new(update_fn), true)).await
            .map(|opt_result| opt_result.expect("Get should always return either V or CacheLoadingError"))
            .map(|meta| meta.result)
    }

    /// Updates a key on the cache with the given update function and returns the updated value if
    /// it existed
    ///
    /// If the key is not present yet, it'll be ignored.
    /// Keys in LOADING state will still be updated as they get available.
    ///
    /// # Arguments
    ///
    /// * `key` - The key which should be updated
    /// * `update_fn` - A `FnMut(&mut V) -> ()` which has the current value as parameter and should
    ///                 update it accordingly
    ///
    /// # Return Value
    ///
    /// Returns a Result with:
    /// Ok - Optional value of type V
    /// Err - Error of type CacheLoadingError
    pub async fn update_mut_if_exists<U>(&self, key: K, update_fn: U) -> Result<Option<V>, CacheLoadingError<E>>
        where U: FnMut(&mut V) -> () + Send + 'static {
        // The trailing `false` tells the worker NOT to trigger the loader for
        // missing keys.
        self.send_cache_action(CacheAction::UpdateMut(key, Box::new(update_fn), false)).await
            .map(|opt| opt.map(|meta| meta.result))
    }

    /// Submits a `CacheAction` to the cache worker task and awaits its response.
    ///
    /// Returns `Ok(Some(ResultMeta))` when a value was found or loaded (the
    /// `cached` flag distinguishes the two), `Ok(None)` when the action completed
    /// without producing a value, and a `CommunicationError` when any of the
    /// internal channels or the spawned loader task failed.
    async fn send_cache_action(&self, action: CacheAction<K, V>) -> Result<Option<ResultMeta<V>>, CacheLoadingError<E>> {
        // One-shot channel on which the worker task sends back its CacheResult.
        let (tx, rx) = tokio::sync::oneshot::channel();
        match self.tx.send(CacheMessage {
            action,
            response: tx,
        }).await {
            Ok(_) => {
                match rx.await {
                    Ok(result) => {
                        match result {
                            // Value was already present in the cache.
                            CacheResult::Found(value) => {
                                Ok(Some(ResultMeta {
                                    result: value,
                                    cached: true,
                                }))
                            }
                            // A load is in flight; await the loader task's handle.
                            CacheResult::Loading(handle) => {
                                match handle.await {
                                    Ok(load_result) => {
                                        // Loader finished: wrap its value (if any)
                                        // and mark it as freshly loaded.
                                        load_result.map(|v| Some(ResultMeta {
                                            result: v,
                                            cached: false,
                                        }))
                                    }
                                    Err(err) => {
                                        // The loader task panicked or was cancelled.
                                        Err(CacheLoadingError::CommunicationError(CacheCommunicationError::FutureJoinError(err)))
                                    }
                                }
                            }
                            // The action completed without producing a value.
                            CacheResult::None => { Ok(None) }
                        }
                    }
                    Err(err) => {
                        // The worker dropped the response sender without answering.
                        Err(CacheLoadingError::CommunicationError(CacheCommunicationError::TokioOneshotRecvError(err)))
                    }
                }
            }
            Err(_) => {
                // The worker task is gone; the mpsc channel is closed.
                Err(CacheLoadingError::CommunicationError(CacheCommunicationError::TokioMpscSendError()))
            }
        }
    }
}