velesdb_core/storage/guard.rs
//! Zero-copy guard for vector data from mmap storage.

use memmap2::MmapMut;
use parking_lot::RwLockReadGuard;

/// Zero-copy guard for vector data from mmap storage.
///
/// This guard holds a read lock on the mmap and provides direct access
/// to the vector data without any memory allocation or copy.
///
/// # Performance
///
/// Using `VectorSliceGuard` instead of `retrieve()` eliminates:
/// - Heap allocation for the result `Vec<f32>`
/// - Memory copy from mmap to the new vector
///
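/// For comparison, the two access paths look roughly like this (illustrative
/// sketch; the exact return type of `retrieve()` is assumed from the bullets
/// above):
///
/// ```rust,ignore
/// let owned: Vec<f32> = storage.retrieve(id)?.unwrap(); // allocates and copies
/// let guard = storage.retrieve_ref(id)?.unwrap();       // borrows from the mmap
/// ```
///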
/// # Example
///
/// ```rust,ignore
/// let guard = storage.retrieve_ref(id)?.unwrap();
/// let slice: &[f32] = guard.as_ref();
/// // Use slice directly - no allocation occurred
/// ```
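///
/// Because the guard implements `AsRef<[f32]>` and `Deref<Target = [f32]>`
/// (see the impls below), it can be handed to slice-based code directly. This
/// is an illustrative sketch; `dot` and `query` are hypothetical and not part
/// of this crate:
///
/// ```rust,ignore
/// fn dot(a: &[f32], b: &[f32]) -> f32 {
///     a.iter().zip(b).map(|(x, y)| x * y).sum()
/// }
///
/// let guard = storage.retrieve_ref(id)?.unwrap();
/// let score = dot(&guard, &query); // deref coercion: &VectorSliceGuard -> &[f32]
/// assert_eq!(guard.len(), query.len()); // slice methods work through Deref too
/// ```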
pub struct VectorSliceGuard<'a> {
    /// Read guard holding the mmap lock
    pub(super) _guard: RwLockReadGuard<'a, MmapMut>,
    /// Pointer to the start of vector data
    pub(super) ptr: *const f32,
    /// Number of f32 elements
    pub(super) len: usize,
}

// SAFETY: `VectorSliceGuard` is Send + Sync because:
// 1. The underlying data is in a memory-mapped file (shared memory).
// 2. The held `RwLockReadGuard` provides shared read access and blocks writers,
//    so the mapped bytes cannot be mutated or remapped while the guard is alive.
// 3. The pointer is derived from the guard and only exposed as an immutable
//    `&[f32]` valid for the guard's lifetime.
unsafe impl Send for VectorSliceGuard<'_> {}
unsafe impl Sync for VectorSliceGuard<'_> {}
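//
// For example, these impls allow a guard to be shared with worker threads for
// read-only scans (sketch; assumes a `guard` obtained via `retrieve_ref`):
//
//     std::thread::scope(|s| {
//         s.spawn(|| {
//             let norm_sq: f32 = guard.iter().map(|x| x * x).sum();
//             let _ = norm_sq;
//         });
//     });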

impl VectorSliceGuard<'_> {
    /// Returns the vector data as a slice.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[f32] {
        // SAFETY: ptr and len were validated during construction,
        // and the guard ensures the mmap remains valid
        unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
    }
}

impl AsRef<[f32]> for VectorSliceGuard<'_> {
    #[inline]
    fn as_ref(&self) -> &[f32] {
        self.as_slice()
    }
}

impl std::ops::Deref for VectorSliceGuard<'_> {
    type Target = [f32];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}