use std::cell::UnsafeCell;
use std::fmt::Debug;

use crate::lockable::{Lockable, LockableIntoInner, OwnedLockable, RawLock, Sharable};
use crate::{Keyable, ThreadKey};

use super::utils::{
	ordered_contains_duplicates, scoped_read, scoped_try_read, scoped_try_write, scoped_write,
};
use super::{utils, BoxedLockCollection, LockGuard};
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
	#[mutants::skip]
	#[cfg(not(tarpaulin_include))]
	fn poison(&self) {
		for lock in &self.locks {
			lock.poison();
		}
	}

	unsafe fn raw_write(&self) {
		// `self.locks` was sorted by address when the collection was built,
		// so the locks are always acquired in a globally consistent order
		utils::ordered_write(self.locks())
	}

	unsafe fn raw_try_write(&self) -> bool {
		utils::ordered_try_write(self.locks())
	}

	unsafe fn raw_unlock_write(&self) {
		for lock in self.locks() {
			lock.raw_unlock_write();
		}
	}

	unsafe fn raw_read(&self) {
		utils::ordered_read(self.locks());
	}

	unsafe fn raw_try_read(&self) -> bool {
		utils::ordered_try_read(self.locks())
	}

	unsafe fn raw_unlock_read(&self) {
		for lock in self.locks() {
			lock.raw_unlock_read();
		}
	}
}

unsafe impl<L: Lockable> Lockable for BoxedLockCollection<L> {
	type Guard<'g>
		= L::Guard<'g>
	where
		Self: 'g;

	type DataMut<'a>
		= L::DataMut<'a>
	where
		Self: 'a;

	fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
		ptrs.extend_from_slice(&self.locks);
	}

	unsafe fn guard(&self) -> Self::Guard<'_> {
		self.child().guard()
	}

	unsafe fn data_mut(&self) -> Self::DataMut<'_> {
		self.child().data_mut()
	}
}

unsafe impl<L: Sharable> Sharable for BoxedLockCollection<L> {
	type ReadGuard<'g>
		= L::ReadGuard<'g>
	where
		Self: 'g;

	type DataRef<'a>
		= L::DataRef<'a>
	where
		Self: 'a;

	unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
		self.child().read_guard()
	}

	unsafe fn data_ref(&self) -> Self::DataRef<'_> {
		self.child().data_ref()
	}
}

unsafe impl<L: OwnedLockable> OwnedLockable for BoxedLockCollection<L> {}

impl<L: LockableIntoInner> LockableIntoInner for BoxedLockCollection<L> {
	type Inner = L::Inner;

	fn into_inner(self) -> Self::Inner {
		LockableIntoInner::into_inner(self.into_child())
	}
}

impl<L> IntoIterator for BoxedLockCollection<L>
where
	L: IntoIterator,
{
	type Item = <L as IntoIterator>::Item;
	type IntoIter = <L as IntoIterator>::IntoIter;

	fn into_iter(self) -> Self::IntoIter {
		self.into_child().into_iter()
	}
}

impl<'a, L> IntoIterator for &'a BoxedLockCollection<L>
where
	&'a L: IntoIterator,
{
	type Item = <&'a L as IntoIterator>::Item;
	type IntoIter = <&'a L as IntoIterator>::IntoIter;

	fn into_iter(self) -> Self::IntoIter {
		self.child().into_iter()
	}
}

impl<L: OwnedLockable, I: FromIterator<L> + OwnedLockable> FromIterator<L>
	for BoxedLockCollection<I>
{
	fn from_iter<T: IntoIterator<Item = L>>(iter: T) -> Self {
		let iter: I = iter.into_iter().collect();
		Self::new(iter)
	}
}

// safety: the raw pointer and lock references are owned by this collection
// and only ever used like a `Box<L>`, so sending or sharing the collection
// is as safe as sending or sharing `L` itself
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl<L: Send> Send for BoxedLockCollection<L> {}
unsafe impl<L: Sync> Sync for BoxedLockCollection<L> {}

impl<L> Drop for BoxedLockCollection<L> {
	#[mutants::skip]
	#[cfg(not(tarpaulin_include))]
	fn drop(&mut self) {
		unsafe {
			// the lock references borrow from `data`, so they must be
			// dropped before the data is freed
			self.locks.clear();
			let boxed: Box<UnsafeCell<L>> = Box::from_raw(self.data.cast_mut());
			drop(boxed)
		}
	}
}

impl<T: ?Sized, L: AsRef<T>> AsRef<T> for BoxedLockCollection<L> {
	fn as_ref(&self) -> &T {
		self.child().as_ref()
	}
}

#[mutants::skip]
#[cfg(not(tarpaulin_include))]
impl<L: Debug> Debug for BoxedLockCollection<L> {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		f.debug_struct(stringify!(BoxedLockCollection))
			.field("data", &self.data)
			.finish_non_exhaustive()
	}
}

impl<L: OwnedLockable + Default> Default for BoxedLockCollection<L> {
	fn default() -> Self {
		Self::new(L::default())
	}
}

impl<L: OwnedLockable> From<L> for BoxedLockCollection<L> {
	fn from(value: L) -> Self {
		Self::new(value)
	}
}

impl<L> BoxedLockCollection<L> {
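	/// Unwraps the collection, returning the lockable value it was built
	/// from.
	///
	/// A minimal usage sketch. The `happylock` crate name and the
	/// `collection::BoxedLockCollection` import path are assumptions here;
	/// adjust them to match the crate's actual re-exports.
	///
	/// ```
	/// use happylock::Mutex;
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
	/// let [m1, m2] = collection.into_child();
	/// assert_eq!(m1.into_inner(), 1);
	/// assert_eq!(m2.into_inner(), 2);
	/// ```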
	#[must_use]
	pub fn into_child(mut self) -> L {
		unsafe {
			// the lock references borrow from the boxed data, so they must go first
			std::ptr::drop_in_place(&mut self.locks);
			let boxed: Box<UnsafeCell<L>> = Box::from_raw(self.data.cast_mut());
			// prevent `Drop for Self` from freeing the data a second time
			std::mem::forget(self);

			boxed.into_inner()
		}
	}

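	/// Returns a shared reference to the underlying lockable value without
	/// acquiring any locks.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::Mutex;
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mutexes = [Mutex::new(0), Mutex::new(1)];
	/// let collection = BoxedLockCollection::new_ref(&mutexes);
	/// assert!(std::ptr::addr_eq(&mutexes, *collection.child()));
	/// ```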
	#[must_use]
	pub fn child(&self) -> &L {
		unsafe {
			self.data
				.as_ref()
				.unwrap_unchecked()
				.get()
				.as_ref()
				.unwrap_unchecked()
		}
	}

	fn locks(&self) -> &[&dyn RawLock] {
		&self.locks
	}
}

impl<L: OwnedLockable> BoxedLockCollection<L> {
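	/// Creates a new collection from a set of owned locks.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new("foo"), Mutex::new("bar")]);
	/// let guard = collection.lock(key);
	/// assert_eq!(*guard[0], "foo");
	/// ```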
	#[must_use]
	pub fn new(data: L) -> Self {
		// safety: owned locks cannot alias each other, so the collection
		// cannot contain duplicates
		unsafe { Self::new_unchecked(data) }
	}
}

impl<'a, L: OwnedLockable> BoxedLockCollection<&'a L> {
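	/// Creates a new collection that borrows its locks instead of owning
	/// them.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let mutexes = [Mutex::new(0), Mutex::new(1)];
	/// let collection = BoxedLockCollection::new_ref(&mutexes);
	/// assert!(collection.try_lock(key).is_ok());
	/// ```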
	#[must_use]
	pub fn new_ref(data: &'a L) -> Self {
		// safety: this borrows a single owned value, so its locks cannot
		// be duplicated within the collection
		unsafe { Self::new_unchecked(data) }
	}
}

impl<L: Lockable> BoxedLockCollection<L> {
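	/// Creates a new collection without checking that the locks are unique.
	///
	/// # Safety
	///
	/// The collection must not contain two references to the same lock;
	/// acquiring the same lock twice would deadlock. Prefer
	/// [`BoxedLockCollection::try_new`], which performs this check at
	/// runtime.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::Mutex;
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mutex1 = Mutex::new(0);
	/// let mutex2 = Mutex::new(1);
	/// // safety: `mutex1` and `mutex2` are distinct locks
	/// let collection = unsafe { BoxedLockCollection::new_unchecked([&mutex1, &mutex2]) };
	/// ```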
	#[must_use]
	pub unsafe fn new_unchecked(data: L) -> Self {
		// leak the data so the lock references can be stored alongside it;
		// `Drop for Self` reclaims the allocation
		let data = Box::leak(Box::new(UnsafeCell::new(data)));
		let data_ref = data.get().cast_const().as_ref().unwrap_unchecked();

		let mut locks = Vec::new();
		data_ref.get_ptrs(&mut locks);

		// sort by address so the locks are always acquired in a globally
		// consistent order, which prevents deadlock
		locks.sort_by_key(|lock| (&raw const **lock).cast::<()>() as usize);

		// the references actually borrow from the leaked box, which outlives them
		let locks: Vec<&'static dyn RawLock> = std::mem::transmute(locks);
		let data = &raw const *data;
		Self { data, locks }
	}

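	/// Creates a new collection, returning `None` if it would contain
	/// duplicate locks.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::Mutex;
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mutex1 = Mutex::new(0);
	/// let mutex2 = Mutex::new(1);
	/// assert!(BoxedLockCollection::try_new([&mutex1, &mutex2]).is_some());
	/// assert!(BoxedLockCollection::try_new([&mutex1, &mutex1]).is_none());
	/// ```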
	#[must_use]
	pub fn try_new(data: L) -> Option<Self> {
		unsafe {
			// safety: duplicates are checked for before the collection
			// can be used
			let this = Self::new_unchecked(data);
			if ordered_contains_duplicates(this.locks()) {
				return None;
			}
			Some(this)
		}
	}

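	/// Locks every lock in the collection for the duration of the closure,
	/// then releases them all.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mut key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new(24), Mutex::new(42)]);
	/// collection.scoped_lock(&mut key, |guard| *guard[0] = 128);
	/// ```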
	pub fn scoped_lock<'a, R>(&'a self, key: impl Keyable, f: impl Fn(L::DataMut<'a>) -> R) -> R {
		scoped_write(self, key, f)
	}

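	/// Attempts to lock every lock in the collection without blocking,
	/// returning the key back in `Err` if any lock is unavailable.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
	/// let sum = collection.scoped_try_lock(key, |guard| *guard[0] + *guard[1]);
	/// assert_eq!(sum.ok(), Some(3));
	/// ```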
	pub fn scoped_try_lock<'a, Key: Keyable, R>(
		&'a self,
		key: Key,
		f: impl Fn(L::DataMut<'a>) -> R,
	) -> Result<R, Key> {
		scoped_try_write(self, key, f)
	}

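	/// Blocks until every lock in the collection is acquired, returning a
	/// guard over all of them.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new("foo"), Mutex::new("bar")]);
	/// let guard = collection.lock(key);
	/// assert_eq!(*guard[1], "bar");
	/// ```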
	#[must_use]
	pub fn lock(&self, key: ThreadKey) -> LockGuard<L::Guard<'_>> {
		unsafe {
			// safety: we hold the thread key, so this thread cannot
			// already be holding any of these locks
			self.raw_write();

			LockGuard {
				guard: self.child().guard(),
				key,
			}
		}
	}

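	/// Attempts to acquire every lock in the collection without blocking,
	/// returning the key back in `Err` if any lock is unavailable.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
	/// let guard = collection.try_lock(key);
	/// assert!(guard.is_ok());
	/// ```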
	pub fn try_lock(&self, key: ThreadKey) -> Result<LockGuard<L::Guard<'_>>, ThreadKey> {
		let guard = unsafe {
			if !self.raw_try_write() {
				return Err(key);
			}

			self.child().guard()
		};

		Ok(LockGuard { guard, key })
	}

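	/// Releases the locks held by the guard and returns the `ThreadKey`
	/// so that it can be reused.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let mutex1 = Mutex::new("foo");
	/// let mutex2 = Mutex::new("bar");
	/// let collection = BoxedLockCollection::try_new((&mutex1, &mutex2)).unwrap();
	/// let guard = collection.lock(key);
	/// let key = BoxedLockCollection::<(&Mutex<_>, &Mutex<_>)>::unlock(guard);
	/// assert!(mutex1.try_lock(key).is_ok());
	/// ```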
	pub fn unlock(guard: LockGuard<L::Guard<'_>>) -> ThreadKey {
		drop(guard.guard);
		guard.key
	}
}

impl<L: Sharable> BoxedLockCollection<L> {
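	/// Locks every lock in the collection for reading for the duration of
	/// the closure, then releases them all.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{RwLock, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mut key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([RwLock::new(24), RwLock::new(42)]);
	/// let sum = collection.scoped_read(&mut key, |guard| *guard[0] + *guard[1]);
	/// assert_eq!(sum, 66);
	/// ```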
	pub fn scoped_read<'a, R>(&'a self, key: impl Keyable, f: impl Fn(L::DataRef<'a>) -> R) -> R {
		scoped_read(self, key, f)
	}

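	/// Attempts to lock every lock in the collection for reading without
	/// blocking, returning the key back in `Err` if any lock is
	/// write-locked.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{RwLock, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
	/// let r = collection.scoped_try_read(key, |guard| *guard[0]);
	/// assert_eq!(r.ok(), Some(1));
	/// ```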
	pub fn scoped_try_read<'a, Key: Keyable, R>(
		&'a self,
		key: Key,
		f: impl Fn(L::DataRef<'a>) -> R,
	) -> Result<R, Key> {
		scoped_try_read(self, key, f)
	}

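	/// Blocks until every lock in the collection is acquired for reading,
	/// returning a shared guard over all of them.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{RwLock, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
	/// let guard = collection.read(key);
	/// assert_eq!(*guard[0], 1);
	/// ```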
	#[must_use]
	pub fn read(&self, key: ThreadKey) -> LockGuard<L::ReadGuard<'_>> {
		unsafe {
			// safety: we hold the thread key, so this thread cannot
			// already be holding any of these locks
			self.raw_read();

			LockGuard {
				guard: self.child().read_guard(),
				key,
			}
		}
	}

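	/// Attempts to acquire every lock in the collection for reading without
	/// blocking, returning the key back in `Err` on failure.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{RwLock, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
	/// assert!(collection.try_read(key).is_ok());
	/// ```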
	pub fn try_read(&self, key: ThreadKey) -> Result<LockGuard<L::ReadGuard<'_>>, ThreadKey> {
		let guard = unsafe {
			if !self.raw_try_read() {
				return Err(key);
			}

			self.child().read_guard()
		};

		Ok(LockGuard { guard, key })
	}

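	/// Releases the read locks held by the guard and returns the
	/// `ThreadKey` so that it can be reused.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{RwLock, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let key = ThreadKey::get().unwrap();
	/// let lock1 = RwLock::new("foo");
	/// let lock2 = RwLock::new("bar");
	/// let collection = BoxedLockCollection::try_new((&lock1, &lock2)).unwrap();
	/// let guard = collection.read(key);
	/// let key = BoxedLockCollection::<(&RwLock<_>, &RwLock<_>)>::unlock_read(guard);
	/// assert!(lock1.try_write(key).is_ok());
	/// ```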
	pub fn unlock_read(guard: LockGuard<L::ReadGuard<'_>>) -> ThreadKey {
		drop(guard.guard);
		guard.key
	}
}

impl<L: LockableIntoInner> BoxedLockCollection<L> {
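	/// Unwraps the collection and every lock inside it, returning the
	/// underlying data.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::Mutex;
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let collection = BoxedLockCollection::new((Mutex::new("Hello"), Mutex::new(47)));
	/// assert_eq!(collection.into_inner(), ("Hello", 47));
	/// ```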
	#[must_use]
	pub fn into_inner(self) -> <Self as LockableIntoInner>::Inner {
		LockableIntoInner::into_inner(self)
	}
}

impl<'a, L: 'a> BoxedLockCollection<L>
where
	&'a L: IntoIterator,
{
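	/// Returns an iterator over references to the locks in the collection,
	/// without locking anything.
	///
	/// A sketch (crate name and import paths assumed, as above):
	///
	/// ```
	/// use happylock::{Mutex, ThreadKey};
	/// use happylock::collection::BoxedLockCollection;
	///
	/// let mut key = ThreadKey::get().unwrap();
	/// let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1)]);
	/// for (i, mutex) in collection.iter().enumerate() {
	///     mutex.scoped_lock(&mut key, |val| assert_eq!(*val, i));
	/// }
	/// ```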
	#[must_use]
	pub fn iter(&'a self) -> <&'a L as IntoIterator>::IntoIter {
		self.into_iter()
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::{Mutex, RwLock, ThreadKey};

	#[test]
	fn from_iterator() {
		let key = ThreadKey::get().unwrap();
		let collection: BoxedLockCollection<Vec<Mutex<&str>>> =
			[Mutex::new("foo"), Mutex::new("bar"), Mutex::new("baz")]
				.into_iter()
				.collect();
		let guard = collection.lock(key);
		assert_eq!(*guard[0], "foo");
		assert_eq!(*guard[1], "bar");
		assert_eq!(*guard[2], "baz");
	}

	#[test]
	fn from() {
		let key = ThreadKey::get().unwrap();
		let collection =
			BoxedLockCollection::from([Mutex::new("foo"), Mutex::new("bar"), Mutex::new("baz")]);
		let guard = collection.lock(key);
		assert_eq!(*guard[0], "foo");
		assert_eq!(*guard[1], "bar");
		assert_eq!(*guard[2], "baz");
	}

	#[test]
	fn into_owned_iterator() {
		let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1), Mutex::new(2)]);
		for (i, mutex) in collection.into_iter().enumerate() {
			assert_eq!(mutex.into_inner(), i);
		}
	}

	#[test]
	fn into_ref_iterator() {
		let mut key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1), Mutex::new(2)]);
		for (i, mutex) in (&collection).into_iter().enumerate() {
			mutex.scoped_lock(&mut key, |val| assert_eq!(*val, i))
		}
	}

	#[test]
	fn ref_iterator() {
		let mut key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1), Mutex::new(2)]);
		for (i, mutex) in collection.iter().enumerate() {
			mutex.scoped_lock(&mut key, |val| assert_eq!(*val, i))
		}
	}

	#[test]
	#[allow(clippy::float_cmp)]
	fn uses_correct_default() {
		let collection =
			BoxedLockCollection::<(Mutex<f64>, Mutex<Option<i32>>, Mutex<usize>)>::default();
		let tuple = collection.into_inner();
		assert_eq!(tuple.0, 0.0);
		assert!(tuple.1.is_none());
		assert_eq!(tuple.2, 0)
	}

	#[test]
	fn non_duplicates_allowed() {
		let mutex1 = Mutex::new(0);
		let mutex2 = Mutex::new(1);
		assert!(BoxedLockCollection::try_new([&mutex1, &mutex2]).is_some())
	}

	#[test]
	fn duplicates_not_allowed() {
		let mutex1 = Mutex::new(0);
		assert!(BoxedLockCollection::try_new([&mutex1, &mutex1]).is_none())
	}

	#[test]
	fn scoped_read_sees_changes() {
		let mut key = ThreadKey::get().unwrap();
		let locks = [RwLock::new(24), RwLock::new(42)];
		let collection = BoxedLockCollection::new(locks);
		collection.scoped_lock(&mut key, |guard| *guard[0] = 128);

		let sum = collection.scoped_read(&mut key, |guard| {
			assert_eq!(*guard[0], 128);
			assert_eq!(*guard[1], 42);
			*guard[0] + *guard[1]
		});

		assert_eq!(sum, 128 + 42);
	}

	#[test]
	fn scoped_try_lock_can_fail() {
		let key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
		let guard = collection.lock(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let r = collection.scoped_try_lock(key, |_| {});
				assert!(r.is_err());
			});
		});

		drop(guard);
	}

	#[test]
	fn scoped_try_read_can_fail() {
		let key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
		let guard = collection.lock(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let r = collection.scoped_try_read(key, |_| {});
				assert!(r.is_err());
			});
		});

		drop(guard);
	}

	#[test]
	fn try_lock_works() {
		let key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
		let guard = collection.try_lock(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let guard = collection.try_lock(key);
				assert!(guard.is_err());
			});
		});

		assert!(guard.is_ok());
	}

	#[test]
	fn try_read_works() {
		let key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
		let guard = collection.try_read(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let guard = collection.try_read(key);
				assert!(guard.is_ok());
			});
		});

		assert!(guard.is_ok());
	}

	#[test]
	fn try_lock_fails_with_one_exclusive_lock() {
		let key = ThreadKey::get().unwrap();
		let locks = [Mutex::new(1), Mutex::new(2)];
		let collection = BoxedLockCollection::new_ref(&locks);
		let guard = locks[1].try_lock(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let guard = collection.try_lock(key);
				assert!(guard.is_err());
			});
		});

		assert!(guard.is_ok());
	}

	#[test]
	fn try_read_fails_during_exclusive_lock() {
		let key = ThreadKey::get().unwrap();
		let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
		let guard = collection.try_lock(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let guard = collection.try_read(key);
				assert!(guard.is_err());
			});
		});

		assert!(guard.is_ok());
	}

	#[test]
	fn try_read_fails_with_one_exclusive_lock() {
		let key = ThreadKey::get().unwrap();
		let locks = [RwLock::new(1), RwLock::new(2)];
		let collection = BoxedLockCollection::new_ref(&locks);
		let guard = locks[1].try_write(key);

		std::thread::scope(|s| {
			s.spawn(|| {
				let key = ThreadKey::get().unwrap();
				let guard = collection.try_read(key);
				assert!(guard.is_err());
			});
		});

		assert!(guard.is_ok());
	}

	#[test]
	fn unlock_collection_works() {
		let key = ThreadKey::get().unwrap();
		let mutex1 = Mutex::new("foo");
		let mutex2 = Mutex::new("bar");
		let collection = BoxedLockCollection::try_new((&mutex1, &mutex2)).unwrap();
		let guard = collection.lock(key);
		let key = BoxedLockCollection::<(&Mutex<_>, &Mutex<_>)>::unlock(guard);

		assert!(mutex1.try_lock(key).is_ok())
	}

	#[test]
	fn read_unlock_collection_works() {
		let key = ThreadKey::get().unwrap();
		let lock1 = RwLock::new("foo");
		let lock2 = RwLock::new("bar");
		let collection = BoxedLockCollection::try_new((&lock1, &lock2)).unwrap();
		let guard = collection.read(key);
		let key = BoxedLockCollection::<(&RwLock<_>, &RwLock<_>)>::unlock_read(guard);

		assert!(lock1.try_write(key).is_ok())
	}

	#[test]
	fn into_inner_works() {
		let collection = BoxedLockCollection::new((Mutex::new("Hello"), Mutex::new(47)));
		assert_eq!(collection.into_inner(), ("Hello", 47))
	}

	#[test]
	fn works_in_collection() {
		let key = ThreadKey::get().unwrap();
		let lock1 = RwLock::new(0);
		let lock2 = RwLock::new(1);
		let collection =
			BoxedLockCollection::try_new(BoxedLockCollection::try_new([&lock1, &lock2]).unwrap())
				.unwrap();

		let mut guard = collection.lock(key);
		assert!(lock1.is_locked());
		assert!(lock2.is_locked());
		assert_eq!(*guard[0], 0);
		assert_eq!(*guard[1], 1);
		*guard[0] = 2;
		let key = BoxedLockCollection::<BoxedLockCollection<[&RwLock<_>; 2]>>::unlock(guard);

		let guard = collection.read(key);
		assert!(lock1.is_locked());
		assert!(lock2.is_locked());
		assert_eq!(*guard[0], 2);
		assert_eq!(*guard[1], 1);
		drop(guard);
	}

	#[test]
	fn as_ref_works() {
		let mutexes = [Mutex::new(0), Mutex::new(1)];
		let collection = BoxedLockCollection::new_ref(&mutexes);

		assert!(std::ptr::addr_eq(&mutexes, collection.as_ref()))
	}

	#[test]
	fn child() {
		let mutexes = [Mutex::new(0), Mutex::new(1)];
		let collection = BoxedLockCollection::new_ref(&mutexes);

		assert!(std::ptr::addr_eq(&mutexes, *collection.child()))
	}
}