// lockable/lib.rs

1//! The [lockable](https://crates.io/crates/lockable) library offers thread-safe
2//! HashMap (see [LockableHashMap](crate::lockable_hash_map::LockableHashMap)),
3//! LruCache (see [LockableLruCache](crate::lockable_lru_cache::LockableLruCache))
4//! and LockPool (see [LockPool](crate::lockpool::LockPool)) types. In all of these
5//! data types, individual keys can be locked/unlocked, even if there is no entry
//! for that key in the map or cache.
7//!
8//! This can be very useful for synchronizing access to an underlying key-value
9//! store or for building cache data structures on top of such a key-value store.
10//!
11//! ## LRU cache example
12//! This example builds a simple LRU cache and locks some entries.
13//! ```
14#![cfg_attr(
15    not(feature = "lru"),
16    doc = "```
17```ignore"
18)]
19//! use lockable::{AsyncLimit, LockableLruCache};
20//!
21//! let lockable_cache = LockableLruCache::<i64, String>::new();
22//! # tokio::runtime::Runtime::new().unwrap().block_on(async {
23//!
24//! // Insert an entry
25//! lockable_cache.async_lock(4, AsyncLimit::no_limit())
26//!     .await?
27//!     .insert(String::from("Value"));
28//!
29//! // Hold a lock on a different entry
30//! let guard = lockable_cache.async_lock(5, AsyncLimit::no_limit())
31//!     .await?;
32//!
33//! // This next line would wait until the lock gets released,
34//! // which in this case would cause a deadlock because we're
35//! // on the same thread
36//! // let guard2 = lockable_cache.async_lock(5, AsyncLimit::no_limit())
37//! //    .await?;
38//!
39//! // After dropping the corresponding guard, we can lock it again
40//! std::mem::drop(guard);
41//! let guard2 = lockable_cache.async_lock(5, AsyncLimit::no_limit())
42//!     .await?;
43//! # Ok::<(), lockable::Never>(())}).unwrap();
44//! ```
45//!
46//! ## Lockpool example
47//! This example builds a simple lock pool using the [LockPool](crate::lockpool::LockPool)
48//! data structure. A lock pool is a pool of keyable locks. This can be used if
49//! you don't need a cache but just some way to synchronize access to an underlying
50//! resource.
51//! ```
52//! use lockable::LockPool;
53//!
54//! let lockpool = LockPool::new();
55//! # tokio::runtime::Runtime::new().unwrap().block_on(async {
56//! let guard1 = lockpool.async_lock(4).await;
57//! let guard2 = lockpool.async_lock(5).await;
58//!
59//! // This next line would wait until the lock gets released,
60//! // which in this case would cause a deadlock because we're
61//! // on the same thread.
62//! // let guard3 = lockpool.async_lock(4).await;
63//!
64//! // After dropping the corresponding guard, we can lock it again
65//! std::mem::drop(guard1);
66//! let guard3 = lockpool.async_lock(4).await;
67//! # Ok::<(), lockable::Never>(())}).unwrap();
68//! ```
69//!
70//! ## HashMap example
71//! If you need a lockable key-value store but don't need the LRU ordering,
72//! you can use [LockableHashMap](crate::lockable_hash_map::LockableHashMap).
73//! ```
74//! use lockable::{AsyncLimit, LockableHashMap};
75//!
76//! let lockable_map = LockableHashMap::<i64, String>::new();
77//! # tokio::runtime::Runtime::new().unwrap().block_on(async {
78//!
79//! // Insert an entry
80//! lockable_map.async_lock(4, AsyncLimit::no_limit())
81//!     .await?
82//!     .insert(String::from("Value"));
83//!
84//! // Hold a lock on a different entry
85//! let guard = lockable_map.async_lock(5, AsyncLimit::no_limit())
86//!     .await?;
87//!
88//! // This next line would wait until the lock gets released,
89//! // which in this case would cause a deadlock because we're
90//! // on the same thread
91//! // let guard2 = lockable_map.async_lock(5, AsyncLimit::no_limit())
92//! //    .await?;
93//!
94//! // After dropping the corresponding guard, we can lock it again
95//! std::mem::drop(guard);
96//! let guard2 = lockable_map.async_lock(5, AsyncLimit::no_limit())
97//!     .await?;
98//! # Ok::<(), lockable::Never>(())}).unwrap();
99//! ```
100//!
101//! ## WARNING: Deadlocks
102//! This data structure is powerful and with great power comes great danger (or something like that).
103//! Having concurrent threads or tasks lock keys in an arbitrary order can easily lead to deadlocks
104//! where one thread is waiting for a lock held by another thread, while the second thread is
105//! waiting for a lock held by the first thread.
106//! Be careful and apply common deadlock prevention strategies, e.g. always lock keys in the same order.
107//!
108//! ## Crate Features
109//! - `lru`: Enables the [LockableLruCache](crate::lockable_lru_cache::LockableLruCache)
110//!   type which adds a dependency on the [lru](https://crates.io/crates/lru) crate.
111//! - `slow_assertions`: Enables slow assertions. Don't use this in production code. It is *very* slow.
112//!   This is useful to assert invariants and search for bugs within the `lockable` crate.
113//!   It is not helpful in finding bugs in user code. If you do enable this and encounter an
114//!   assertion failing, please report it in a GitHub issue.
115
// TODO Figure out which functions actually should or shouldn't be #[inline]

// Crate-wide lint policy: no `unsafe` anywhere, and every public item must
// carry documentation (enforced at compile time).
#![forbid(unsafe_code)]
#![deny(missing_docs)]
// We need to add explicit links because our `gen_readme.sh` script requires them.
#![allow(rustdoc::redundant_explicit_links)]

// Internal implementation modules. These are private; the items users need
// from them are re-exported from the crate root below.
mod guard;
mod limit;
mod lockable_map_impl;
mod lockable_trait;
mod map_like;
mod utils;

#[cfg(test)]
mod tests;

// The concrete lockable data structures. The LRU cache is only compiled when
// the optional `lru` cargo feature is enabled (see "Crate Features" above).
mod lockable_hash_map;
#[cfg(feature = "lru")]
mod lockable_lru_cache;
mod lockpool;

// Public API surface: everything user-facing is accessed directly from the
// crate root, e.g. `lockable::LockableHashMap`, `lockable::Guard`.
pub use guard::{Guard, TryInsertError};
pub use limit::{AsyncLimit, SyncLimit};
pub use lockable_hash_map::LockableHashMap;
#[cfg(feature = "lru")]
pub use lockable_lru_cache::LockableLruCache;
pub use lockable_trait::Lockable;
pub use lockpool::LockPool;
pub use utils::never::{InfallibleUnwrap, Never};