contiguous_mem/
refs.rs

//! Returned reference types and read/write guards.
//!
//! See [`ContiguousMemoryStorage::push`](crate::ContiguousMemoryStorage::push)
//! for information on implementation-specific return values.
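//!
//! # Example
//!
//! A minimal usage sketch; the container construction and the exact `push`
//! signature are illustrative assumptions here, see the crate root
//! documentation for the concrete API:
//!
//! ```ignore
//! // Hypothetical default-implementation container.
//! let mut memory = ContiguousMemory::new(1024);
//!
//! // `push` stores the value and returns a reference type from this module.
//! let stored: CERef<u32> = memory.push(42u32);
//!
//! // The reference can later be used to read the value back.
//! assert_eq!(*stored.get(), 42);
//! ```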

use core::{
    marker::PhantomData,
    ops::{Deref, DerefMut},
};

use crate::{
    details::{ImplConcurrent, ImplDefault, ImplDetails, StorageDetails},
    error::{LockSource, LockingError, RegionBorrowedError},
    range::ByteRange,
    types::*,
};

/// A synchronized (thread-safe) reference to `T` data stored in a
/// [`ContiguousMemoryStorage`](crate::ContiguousMemoryStorage) structure.
pub struct SyncContiguousEntryRef<T: ?Sized> {
    pub(crate) inner: Arc<ReferenceState<T, ImplConcurrent>>,
    #[cfg(feature = "ptr_metadata")]
    pub(crate) metadata: <T as Pointee>::Metadata,
    #[cfg(not(feature = "ptr_metadata"))]
    pub(crate) _phantom: PhantomData<T>,
}

/// A shorter type name for [`SyncContiguousEntryRef`].
pub type SCERef<T> = SyncContiguousEntryRef<T>;

impl<T: ?Sized> SyncContiguousEntryRef<T> {
    /// Returns a byte range within container memory this reference points to.
    pub fn range(&self) -> ByteRange {
        self.inner.range
    }

    /// Returns a reference to data at its current location or returns a
    /// [`LockingError::Poisoned`](crate::error::LockingError::Poisoned) error
    /// if the Mutex holding the `base` address pointer has been poisoned.
    ///
    /// If the data is mutably accessed, this method will block the current
    /// thread until it becomes available.
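    ///
    /// # Example
    ///
    /// A minimal sketch of blocking read access; `number_ref` is assumed to
    /// have been returned by a concurrent container's `push` call:
    ///
    /// ```ignore
    /// // number_ref: SCERef<u64> holding the value 11
    /// let guard = number_ref.get().unwrap();
    /// assert_eq!(*guard, 11);
    /// // Dropping the guard releases the read borrow.
    /// drop(guard);
    /// ```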
    pub fn get(&self) -> Result<MemoryReadGuard<'_, T, ImplConcurrent>, LockingError>
    where
        T: RefSizeReq,
    {
        let guard = self.inner.borrow_kind.read_named(LockSource::Reference)?;

        unsafe {
            let base = ImplConcurrent::get_base(&self.inner.state.base)?;
            let pos = base.add(self.inner.range.0);

            Ok(MemoryReadGuard {
                state: self.inner.clone(),
                guard,
                #[cfg(not(feature = "ptr_metadata"))]
                value: &*(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &*core::ptr::from_raw_parts(pos as *const (), self.metadata),
            })
        }
    }

    /// Returns a reference to data at its current location or returns a
    /// [`LockingError::Poisoned`](crate::error::LockingError::Poisoned) error
    /// if the Mutex holding the `base` address pointer has been poisoned.
    ///
    /// If the data is mutably accessed, this method returns a
    /// [`LockingError::WouldBlock`](crate::error::LockingError::WouldBlock)
    /// error.
    pub fn try_get(&self) -> Result<MemoryReadGuard<'_, T, ImplConcurrent>, LockingError>
    where
        T: RefSizeReq,
    {
        let guard = self
            .inner
            .borrow_kind
            .try_read_named(LockSource::Reference)?;

        unsafe {
            let base = ImplConcurrent::get_base(&self.inner.state.base)?;
            let pos = base.add(self.inner.range.0);

            Ok(MemoryReadGuard {
                state: self.inner.clone(),
                guard,
                #[cfg(not(feature = "ptr_metadata"))]
                value: &*(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &*core::ptr::from_raw_parts(pos as *const (), self.metadata),
            })
        }
    }
    /// Returns a write guard to referenced data at its current location, or a
    /// [`LockingError::Poisoned`] error if the Mutex holding the base address
    /// pointer or the Mutex holding the concurrent mutable access flag has
    /// been poisoned.
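    ///
    /// # Example
    ///
    /// A minimal sketch of blocking write access; `number_ref` is assumed to
    /// be a mutable `SCERef<u64>` returned by a concurrent container's `push`
    /// call:
    ///
    /// ```ignore
    /// // number_ref holds the value 11 before the mutation
    /// let mut guard = number_ref.get_mut().unwrap();
    /// *guard += 1;
    /// drop(guard); // release the write borrow before reading again
    /// assert_eq!(*number_ref.get().unwrap(), 12);
    /// ```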
    pub fn get_mut(&mut self) -> Result<MemoryWriteGuard<'_, T, ImplConcurrent>, LockingError>
    where
        T: RefSizeReq,
    {
        let guard = self.inner.borrow_kind.write_named(LockSource::Reference)?;
        unsafe {
            let base = ImplConcurrent::get_base(&self.inner.state.base)?;
            let pos = base.add(self.inner.range.0);
            Ok(MemoryWriteGuard {
                state: self.inner.clone(),
                guard,
                #[cfg(not(feature = "ptr_metadata"))]
                value: &mut *(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &mut *core::ptr::from_raw_parts_mut::<T>(pos as *mut (), self.metadata),
            })
        }
    }

    /// Returns a write guard to referenced data at its current location or a
    /// `LockingError` if that isn't possible.
    ///
    /// # Errors
    ///
    /// This function can return the following errors:
    ///
    /// - [`LockingError::Poisoned`] error if the Mutex holding the base address
    ///   pointer or the Mutex holding the mutable access exclusion flag has
    ///   been poisoned.
    ///
    /// - [`LockingError::WouldBlock`] error if accessing the referenced data
    ///   chunk would block.
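    ///
    /// # Example
    ///
    /// A sketch of non-blocking write access; `entry` is assumed to be a
    /// mutable `SCERef` obtained from a concurrent container's `push` call:
    ///
    /// ```ignore
    /// if let Ok(mut guard) = entry.try_get_mut() {
    ///     *guard = 5;
    /// } else {
    ///     // Either the data is currently borrowed elsewhere (`WouldBlock`)
    ///     // or one of the involved locks has been poisoned.
    /// }
    /// ```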
    pub fn try_get_mut(&mut self) -> Result<MemoryWriteGuard<'_, T, ImplConcurrent>, LockingError>
    where
        T: RefSizeReq,
    {
        let guard = self
            .inner
            .borrow_kind
            .try_write_named(LockSource::Reference)?;
        unsafe {
            let base = ImplConcurrent::try_get_base(&self.inner.state.base)?;
            let pos = base.add(self.inner.range.0);
            Ok(MemoryWriteGuard {
                state: self.inner.clone(),
                guard,
                #[cfg(not(feature = "ptr_metadata"))]
                value: &mut *(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &mut *core::ptr::from_raw_parts_mut::<T>(pos as *mut (), self.metadata),
            })
        }
    }

    /// Casts this reference into a dynamic type `R`.
    #[cfg(feature = "ptr_metadata")]
    pub fn into_dyn<R: ?Sized>(self) -> SyncContiguousEntryRef<R>
    where
        T: Sized + Unsize<R>,
    {
        unsafe {
            SyncContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata: static_metadata::<T, R>(),
            }
        }
    }

    /// Tries downcasting this dynamic reference into a discrete type `R`;
    /// returns `None` if the drop handler of `R` doesn't match the original one.
    #[cfg(feature = "ptr_metadata")]
    pub fn downcast_dyn<R: Unsize<T>>(self) -> Option<SyncContiguousEntryRef<R>> {
        if self.inner.drop_fn != drop_fn::<R>() {
            return None;
        }
        unsafe {
            Some(SyncContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata: (),
            })
        }
    }

    /// Transmutes this reference to type `R` with provided `metadata`.
    ///
    /// The [`static_metadata`](crate::static_metadata) function may be used to
    /// statically construct metadata for a struct-trait pair.
    ///
    /// # Safety
    ///
    /// See: [`ContiguousEntryRef::with_metadata`]
    #[cfg(feature = "ptr_metadata")]
    pub unsafe fn with_metadata<R: ?Sized>(
        self,
        metadata: <R as Pointee>::Metadata,
    ) -> SyncContiguousEntryRef<R> {
        unsafe {
            SyncContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata,
            }
        }
    }

    /// Creates an immutable pointer to underlying data, blocking the current
    /// thread until the base address can be read.
    ///
    /// This function can return a [`LockingError::Poisoned`] error if the Mutex
    /// holding the base address pointer has been poisoned.
    ///
    /// # Safety
    ///
    /// See: [`ContiguousEntryRef::as_ptr`]
    pub unsafe fn as_ptr(&self) -> Result<*const T, LockingError>
    where
        T: RefSizeReq,
    {
        self.as_ptr_mut().map(|it| it as *const T)
    }

    /// Creates a mutable pointer to underlying data, blocking the current
    /// thread until the base address can be read.
    ///
    /// This function can return a [`LockingError::Poisoned`] error if the Mutex
    /// holding the base address pointer has been poisoned.
    ///
    /// # Safety
    ///
    /// See: [`ContiguousEntryRef::as_ptr_mut`]
    pub unsafe fn as_ptr_mut(&self) -> Result<*mut T, LockingError>
    where
        T: RefSizeReq,
    {
        let base = ImplConcurrent::get_base(&self.inner.state.base)?;
        let pos = base.add(self.inner.range.0);
        #[cfg(not(feature = "ptr_metadata"))]
        {
            Ok(pos as *mut T)
        }
        #[cfg(feature = "ptr_metadata")]
        {
            Ok(core::ptr::from_raw_parts_mut::<T>(
                pos as *mut (),
                self.metadata,
            ))
        }
    }

    /// Creates an immutable pointer to underlying data while also preventing
    /// the occupied memory region from being marked as free, blocking the
    /// current thread until the base address can be read.
    ///
    /// This function can return a [`LockingError::Poisoned`] error if the Mutex
    /// holding the base address pointer has been poisoned.
    ///
    /// # Safety
    ///
    /// See: [`ContiguousEntryRef::into_ptr`]
    pub unsafe fn into_ptr(self) -> Result<*const T, LockingError>
    where
        T: RefSizeReq,
    {
        self.into_ptr_mut().map(|it| it as *const T)
    }

    /// Creates a mutable pointer to underlying data while also preventing
    /// the occupied memory region from being marked as free, blocking the
    /// current thread until the base address can be read.
    ///
    /// This function can return a [`LockingError::Poisoned`] error if the Mutex
    /// holding the base address pointer has been poisoned.
    ///
    /// # Safety
    ///
    /// See: [`ContiguousEntryRef::into_ptr_mut`]
    pub unsafe fn into_ptr_mut(self) -> Result<*mut T, LockingError>
    where
        T: RefSizeReq,
    {
        let result = self.as_ptr_mut();
        let inner: *mut ReferenceState<T, ImplConcurrent> = self.inner.as_ref()
            as *const ReferenceState<T, ImplConcurrent>
            as *mut ReferenceState<T, ImplConcurrent>;
        core::ptr::drop_in_place(&mut (*inner).state);
        core::mem::forget(self.inner);
        result
    }
}

impl<T: ?Sized> EntryRef for SyncContiguousEntryRef<T> {}

impl<T: ?Sized> Clone for SyncContiguousEntryRef<T> {
    fn clone(&self) -> Self {
        SyncContiguousEntryRef {
            inner: self.inner.clone(),
            #[cfg(feature = "ptr_metadata")]
            metadata: self.metadata,
            #[cfg(not(feature = "ptr_metadata"))]
            _phantom: PhantomData,
        }
    }
}

#[cfg(feature = "debug")]
impl<T: ?Sized> core::fmt::Debug for SyncContiguousEntryRef<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SyncContiguousEntryRef")
            .field("inner", &self.inner)
            .finish()
    }
}

/// A thread-unsafe reference to `T` data stored in a
/// [`ContiguousMemoryStorage`](crate::ContiguousMemoryStorage) structure.
pub struct ContiguousEntryRef<T: ?Sized> {
    pub(crate) inner: Rc<ReferenceState<T, ImplDefault>>,
    #[cfg(feature = "ptr_metadata")]
    pub(crate) metadata: <T as Pointee>::Metadata,
    #[cfg(not(feature = "ptr_metadata"))]
    pub(crate) _phantom: PhantomData<T>,
}

/// A shorter type name for [`ContiguousEntryRef`].
pub type CERef<T> = ContiguousEntryRef<T>;

impl<T: ?Sized> ContiguousEntryRef<T> {
    /// Returns a byte range within container memory this reference points to.
    pub fn range(&self) -> ByteRange {
        self.inner.range
    }

    /// Returns a reference to data at its current location and panics if the
    /// represented memory region is mutably borrowed.
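    ///
    /// # Example
    ///
    /// A minimal sketch; `value_ref` is assumed to have been returned by a
    /// container's `push` call:
    ///
    /// ```ignore
    /// // value_ref: CERef<u32> holding 42
    /// assert_eq!(*value_ref.get(), 42);
    /// ```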
    pub fn get(&self) -> MemoryReadGuard<'_, T, ImplDefault>
    where
        T: RefSizeReq,
    {
        ContiguousEntryRef::<T>::try_get(self).expect("mutably borrowed")
    }

    /// Returns a reference to data at its current location or a
    /// [`RegionBorrowedError`] error if the represented memory region is
    /// mutably borrowed.
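    ///
    /// # Example
    ///
    /// A sketch of how a shared borrow is refused while a write guard is
    /// alive; `entry` is assumed to be a mutable `CERef<u32>` returned by a
    /// container's `push` call:
    ///
    /// ```ignore
    /// let entry_clone = entry.clone();
    /// let write_guard = entry.get_mut();
    /// assert!(entry_clone.try_get().is_err()); // region is mutably borrowed
    /// drop(write_guard);
    /// assert!(entry_clone.try_get().is_ok());
    /// ```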
    pub fn try_get(&self) -> Result<MemoryReadGuard<'_, T, ImplDefault>, RegionBorrowedError>
    where
        T: RefSizeReq,
    {
        let state = self.inner.borrow_kind.get();
        if let BorrowState::Read(count) = state {
            self.inner.borrow_kind.set(BorrowState::Read(count + 1));
        } else {
            return Err(RegionBorrowedError {
                range: self.inner.range,
            });
        }

        unsafe {
            let base = ImplDefault::get_base(&self.inner.state.base);
            let pos = base.add(self.inner.range.0);

            Ok(MemoryReadGuard {
                state: self.inner.clone(),
                guard: (),
                #[cfg(not(feature = "ptr_metadata"))]
                value: &*(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &*core::ptr::from_raw_parts_mut::<T>(pos as *mut (), self.metadata),
            })
        }
    }

    /// Returns a mutable reference to data at its current location and panics
    /// if the reference has already been borrowed.
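    ///
    /// # Example
    ///
    /// A minimal sketch of mutating stored data through the returned write
    /// guard; `entry` is assumed to be a mutable `CERef<String>` returned by
    /// a container's `push` call:
    ///
    /// ```ignore
    /// // entry initially holds "hello"
    /// entry.get_mut().push_str(" world");
    /// assert_eq!(*entry.get(), "hello world");
    /// ```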
    pub fn get_mut(&mut self) -> MemoryWriteGuard<'_, T, ImplDefault>
    where
        T: RefSizeReq,
    {
        ContiguousEntryRef::<T>::try_get_mut(self).expect("mutably borrowed")
    }

    /// Returns a mutable reference to data at its current location or a
    /// [`RegionBorrowedError`] error if the represented memory region is
    /// already borrowed.
    pub fn try_get_mut(
        &mut self,
    ) -> Result<MemoryWriteGuard<'_, T, ImplDefault>, RegionBorrowedError>
    where
        T: RefSizeReq,
    {
        if self.inner.borrow_kind.get() != BorrowState::Read(0) {
            return Err(RegionBorrowedError {
                range: self.inner.range,
            });
        } else {
            self.inner.borrow_kind.set(BorrowState::Write);
        }

        unsafe {
            let base = ImplDefault::get_base(&self.inner.state.base);
            let pos = base.add(self.inner.range.0);

            Ok(MemoryWriteGuard {
                state: self.inner.clone(),
                guard: (),
                #[cfg(not(feature = "ptr_metadata"))]
                value: &mut *(pos as *mut T),
                #[cfg(feature = "ptr_metadata")]
                value: &mut *core::ptr::from_raw_parts_mut::<T>(pos as *mut (), self.metadata),
            })
        }
    }

    /// Casts this reference into a dynamic type `R`.
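    ///
    /// # Example
    ///
    /// A sketch of upcasting a concrete entry into a trait-object entry; it
    /// assumes the `ptr_metadata` feature on a nightly compiler and that
    /// `value_ref` holds a `u32`:
    ///
    /// ```ignore
    /// // value_ref: CERef<u32>
    /// let debug_ref: CERef<dyn core::fmt::Debug> = value_ref.into_dyn();
    /// println!("{:?}", &*debug_ref.get());
    /// ```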
    #[cfg(feature = "ptr_metadata")]
    pub fn into_dyn<R: ?Sized>(self) -> ContiguousEntryRef<R>
    where
        T: Sized + Unsize<R>,
    {
        unsafe {
            ContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata: static_metadata::<T, R>(),
            }
        }
    }

    /// Tries downcasting this dynamic reference into a discrete type `R`;
    /// returns `None` if the drop handler of `R` doesn't match the original one.
    #[cfg(feature = "ptr_metadata")]
    pub fn downcast_dyn<R: Unsize<T>>(self) -> Option<ContiguousEntryRef<R>> {
        if self.inner.drop_fn != drop_fn::<R>() {
            return None;
        }
        unsafe {
            Some(ContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata: (),
            })
        }
    }

    /// Transmutes this reference to type `R` with provided `metadata`.
    ///
    /// The [`static_metadata`](crate::static_metadata) function may be used to
    /// statically construct metadata for a struct-trait pair.
    ///
    /// # Safety
    ///
    /// This function is unsafe because it assumes that `T` implements `R`.
    /// Because the original type of the stored data can be erased through
    /// [`into_dyn`](ContiguousEntryRef::into_dyn), it's impossible to check
    /// whether the initial struct actually implements `R`.
    ///
    /// Calling methods from an incorrect vtable will cause undefined behavior.
    #[cfg(feature = "ptr_metadata")]
    pub unsafe fn with_metadata<R: ?Sized>(
        self,
        metadata: <R as Pointee>::Metadata,
    ) -> ContiguousEntryRef<R> {
        unsafe {
            ContiguousEntryRef {
                inner: core::mem::transmute(self.inner),
                metadata,
            }
        }
    }

    /// Creates an immutable pointer to underlying data.
    ///
    /// # Safety
    ///
    /// This function returns a pointer that may become invalid if the
    /// container's memory is resized to a capacity which requires the memory
    /// segment to be moved.
    ///
    /// When the reference goes out of scope, its region will be marked as free
    /// which means that a subsequent call to [`ContiguousMemoryStorage::push`]
    /// or friends can cause undefined behavior when dereferencing the pointer.
    ///
    /// [`ContiguousMemoryStorage::push`]: crate::ContiguousMemoryStorage::push
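    ///
    /// # Example
    ///
    /// A sketch of the intended usage pattern: read through the pointer only
    /// while the reference is alive and the container has not been resized.
    /// `entry` is assumed to have been returned by a container's `push` call:
    ///
    /// ```ignore
    /// // entry: CERef<u32>
    /// let ptr = unsafe { entry.as_ptr() };
    /// let value = unsafe { *ptr };
    /// // `entry` must stay alive (and the container unmoved) for the read
    /// // above to remain sound.
    /// ```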
    pub unsafe fn as_ptr(&self) -> *const T
    where
        T: RefSizeReq,
    {
        self.as_ptr_mut() as *const T
    }

    /// Creates a mutable pointer to underlying data.
    ///
    /// # Safety
    ///
    /// In addition to concerns noted in [`ContiguousEntryRef::as_ptr`],
    /// this function also provides mutable access to the underlying data
    /// allowing potential data races.
    pub unsafe fn as_ptr_mut(&self) -> *mut T
    where
        T: RefSizeReq,
    {
        let base = ImplDefault::get_base(&self.inner.state.base);
        let pos = base.add(self.inner.range.0);

        #[cfg(not(feature = "ptr_metadata"))]
        {
            pos as *mut T
        }
        #[cfg(feature = "ptr_metadata")]
        {
            core::ptr::from_raw_parts_mut::<T>(pos as *mut (), self.metadata)
        }
    }

    /// Creates an immutable pointer to underlying data while also preventing
    /// the occupied memory region from being marked as free.
    ///
    /// # Safety
    ///
    /// This function returns a pointer that may become invalid if the
    /// container's memory is resized to a capacity which requires the memory
    /// segment to be moved.
    pub unsafe fn into_ptr(self) -> *const T
    where
        T: RefSizeReq,
    {
        self.into_ptr_mut() as *const T
    }

    /// Creates a mutable pointer to underlying data while also preventing
    /// the occupied memory region from being marked as free.
    ///
    /// # Safety
    ///
    /// In addition to concerns noted in
    /// [`ContiguousEntryRef::into_ptr`], this function also provides
    /// mutable access to the underlying data allowing potential data races.
    pub unsafe fn into_ptr_mut(self) -> *mut T
    where
        T: RefSizeReq,
    {
        let result = self.as_ptr_mut();
        let inner: *mut ReferenceState<T, ImplDefault> = self.inner.as_ref()
            as *const ReferenceState<T, ImplDefault>
            as *mut ReferenceState<T, ImplDefault>;
        core::ptr::drop_in_place(&mut (*inner).state);
        core::mem::forget(self.inner);
        result
    }
}

impl<T: ?Sized> EntryRef for ContiguousEntryRef<T> {}

impl<T: ?Sized> Clone for ContiguousEntryRef<T> {
    fn clone(&self) -> Self {
        ContiguousEntryRef {
            inner: self.inner.clone(),
            #[cfg(feature = "ptr_metadata")]
            metadata: self.metadata,
            #[cfg(not(feature = "ptr_metadata"))]
            _phantom: PhantomData,
        }
    }
}

#[cfg(feature = "debug")]
impl<T: ?Sized> core::fmt::Debug for ContiguousEntryRef<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("ContiguousEntryRef")
            .field("inner", &self.inner)
            .finish()
    }
}

pub(crate) mod sealed {
    use super::*;

    pub trait EntryRef {}

    /// Internal state of [`ContiguousEntryRef`] and [`SyncContiguousEntryRef`].
    pub struct ReferenceState<T: ?Sized, Impl: ImplDetails> {
        pub state: Impl::StorageState,
        pub range: ByteRange,
        pub borrow_kind: Impl::BorrowLock,
        pub drop_fn: DropFn,
        pub _phantom: PhantomData<T>,
    }

    #[cfg(feature = "debug")]
    impl<T: ?Sized, Impl: ImplDetails> core::fmt::Debug for ReferenceState<T, Impl>
    where
        Impl::StorageState: core::fmt::Debug,
        Impl::BorrowLock: core::fmt::Debug,
    {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            f.debug_struct("ReferenceState")
                .field("state", &self.state)
                .field("range", &self.range)
                .field("borrow_kind", &self.borrow_kind)
                .finish()
        }
    }

    impl<T: ?Sized, Impl: ImplDetails> Drop for ReferenceState<T, Impl> {
        fn drop(&mut self) {
            let base = Impl::get_base(&Impl::deref_state(&self.state).base);
            let tracker = Impl::get_allocation_tracker(&mut self.state);
            if let Some(it) = Impl::free_region(tracker, base, self.range) {
                (self.drop_fn)(it);
            };
        }
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum BorrowKind {
        Read,
        Write,
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum BorrowState {
        Read(usize),
        Write,
    }
}
use sealed::*;

/// A smart reference wrapper responsible for tracking and managing a flag
/// that indicates whether the memory segment is actively being written to.
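///
/// # Example
///
/// A minimal sketch of how a write guard is used; `entry` is assumed to be a
/// mutable [`ContiguousEntryRef`] to a `u32` returned by a container's `push`
/// call:
///
/// ```ignore
/// let mut guard = entry.get_mut();
/// *guard = 9;   // dereferences mutably to the stored value
/// drop(guard);  // dropping the guard releases the write borrow
/// ```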
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct MemoryWriteGuard<'a, T: ?Sized, Impl: ImplDetails> {
    state: Impl::RefState<T>,
    #[allow(unused)]
    guard: Impl::WriteGuard<'a>,
    value: &'a mut T,
}

impl<'a, T: ?Sized, Impl: ImplDetails> Deref for MemoryWriteGuard<'a, T, Impl> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.value
    }
}

impl<'a, T: ?Sized, Impl: ImplDetails> DerefMut for MemoryWriteGuard<'a, T, Impl> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.value
    }
}

impl<'a, T: ?Sized, Impl: ImplDetails> Drop for MemoryWriteGuard<'a, T, Impl> {
    fn drop(&mut self) {
        Impl::unborrow_ref::<T>(&self.state, BorrowKind::Write);
    }
}

/// A smart reference wrapper responsible for tracking and managing a flag
/// that indicates whether the memory segment is actively being read from.
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct MemoryReadGuard<'a, T: ?Sized, Impl: ImplDetails> {
    state: Impl::RefState<T>,
    #[allow(unused)]
    guard: Impl::ReadGuard<'a>,
    value: &'a T,
}

impl<'a, T: ?Sized, Impl: ImplDetails> Deref for MemoryReadGuard<'a, T, Impl> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.value
    }
}

impl<'a, T: ?Sized, Impl: ImplDetails> Drop for MemoryReadGuard<'a, T, Impl> {
    fn drop(&mut self) {
        Impl::unborrow_ref::<T>(&self.state, BorrowKind::Read);
    }
}