1#[cfg(test)]
16use std::sync::Weak;
17use std::{fmt, ops::{Deref, DerefMut}, ptr::{self, NonNull}, sync::{atomic::{AtomicPtr, Ordering}, Arc}, time::Duration};
18
19use crossbeam_utils::Backoff;
20
21use crate::{ICoWError, ICoWLockTypes};
22
23
24
/// A pending copy-on-write update of an [`ICoW`] value.
///
/// Created from a read snapshot; holds a private mutable candidate value
/// that `commit` later tries to publish atomically.
#[derive(Debug)]
pub struct ICoWCopy<'copy, ITEM: fmt::Debug + Send>
{
    // Snapshot taken when this copy was created; exposed via `prev_val`.
    read: ICoWRead<'copy, ITEM>,

    // The mutable candidate value that `commit` will try to publish.
    new_item: ITEM,

    // The ICoW instance this copy will be committed back into.
    bind: &'copy ICoW<ITEM>
}
40
41impl<'copy, ITEM: fmt::Debug + Send> Deref for ICoWCopy<'copy, ITEM>
42{
43 type Target = ITEM;
44
45 fn deref(&self) -> &Self::Target
46 {
47 return &self.new_item;
48 }
49}
50
51impl<'copy, ITEM: fmt::Debug + Send> DerefMut for ICoWCopy<'copy, ITEM>
52{
53 fn deref_mut(&mut self) -> &mut Self::Target
54 {
55 return &mut self.new_item
56 }
57}
58
impl<'copy, ITEM: fmt::Debug + Send> ICoWCopy<'copy, ITEM>
{
    /// Returns the snapshot of the value taken when this copy was
    /// created.
    pub
    fn prev_val(&self) -> &ITEM
    {
        // `self.read` derefs to the snapshot item via ICoWRead's Deref.
        return &self.read;
    }

    /// Attempts to publish the edited value, retrying with backoff while
    /// other writers race.
    ///
    /// Errors (the guard is handed back in both cases so the caller can
    /// retry or recover the value):
    /// - `ExclusiveLockPending` — the pointer is null, i.e. an exclusive
    ///   lock is currently held.
    /// - `WouldBlock` — the backoff budget was exhausted under
    ///   contention.
    pub
    fn commit(mut self) -> Result<(), (ICoWError, Self)>
    {
        let mut prev = self.bind.inner.load(Ordering::Relaxed);

        if prev.is_null() == true
        {
            // Null encodes "an exclusive writer owns the slot".
            return Err((ICoWError::ExclusiveLockPending, self));
        }

        // Allocate the replacement up front; `new_item` moves into the
        // leaked box (it is recovered below on the failure path).
        let box_leaked =
            Box::leak(Box::new(ICoWInternal{ item: Arc::new(self.new_item) }));

        let bkoff = Backoff::new();

        loop
        {
            let inner = NonNull::new(box_leaked).unwrap();

            // Weak CAS is fine here because it sits in a retry loop.
            let res =
                self
                .bind
                .inner
                .compare_exchange_weak(prev, inner.as_ptr(), Ordering::SeqCst, Ordering::Acquire);

            if let Err(e) = res
            {
                if bkoff.is_completed() == false
                {
                    // Lost the race (or spurious weak failure): adopt the
                    // observed pointer and retry.
                    // NOTE(review): if `e` is null here the next CAS can
                    // publish while an exclusive lock is pending —
                    // confirm this interleaving is intended.
                    prev = e;
                    bkoff.spin();
                    continue;
                }
                else
                {
                    // Give up: reclaim the leaked allocation and hand the
                    // value back to the caller.
                    self.new_item = Arc::into_inner(unsafe { Box::from_raw(box_leaked)}.item).unwrap();
                    return Err((ICoWError::WouldBlock, self));
                }
            }

            // Success: we now own the previous allocation.
            let boxed = unsafe { Box::from_raw(res.unwrap()) };

            let val = boxed.item.clone();

            drop(boxed);

            // Presumably a grace period for readers that loaded the old
            // pointer just before the swap. NOTE(review): the
            // ICoWInternal box itself is already freed above — only the
            // inner Arc payload is kept alive here; confirm a racing
            // reader cannot still dereference the freed box.
            std::thread::sleep(Duration::from_nanos(10));
            drop(val);

            break;
        }

        return Ok(());
    }
}
144
/// An exclusive write lock on an [`ICoW`] value.
///
/// While this guard exists the shared pointer is null, which makes
/// readers spin until `commit` publishes the new value.
///
/// NOTE(review): there is no `Drop` impl — dropping the guard without
/// calling `commit` leaves the pointer null (locked) forever and readers
/// will spin indefinitely; confirm this is intended.
#[derive(Debug)]
pub struct ICoWLock<'lock, ITEM: fmt::Debug + Send>
{
    // The allocation detached from the ICoW when the lock was taken;
    // kept alive until commit.
    prev_item: Box<ICoWInternal<ITEM>>,
    // The mutable candidate value that `commit` will publish.
    item: ITEM,
    // The ICoW instance this lock belongs to.
    inst: &'lock ICoW<ITEM>
}
157
158impl<'lock, ITEM: fmt::Debug + Send> ICoWLock<'lock, ITEM>
159{
160 pub
163 fn prev_val(&self) -> &ITEM
164 {
165 return self.prev_item.item.as_ref();
166 }
167
168 pub
178 fn commit(mut self) -> Result<(), Self>
179 {
180 let box_leaked = Box::leak(Box::new(ICoWInternal{ item: Arc::new(self.item) }));
181
182 let inner = NonNull::new(box_leaked).unwrap();
183
184 let res =
185 self
186 .inst
187 .inner
188 .compare_exchange_weak(
189 ptr::null_mut(),
190 inner.as_ptr(),
191 Ordering::SeqCst,
192 Ordering::Acquire
193 );
194
195 if let Err(_e) = res
196 {
197 self.item = Arc::into_inner(unsafe { Box::from_raw(box_leaked)}.item).unwrap();
198 return Err(self);
199 }
200
201 let temp = self.prev_item.item.clone();
202
203 std::thread::sleep(Duration::from_nanos(10));
204
205 drop(temp);
206
207 return Ok(());
208 }
209}
210
211impl<'lock, ITEM: fmt::Debug + Send> Deref for ICoWLock<'lock, ITEM>
212{
213 type Target = ITEM;
214
215 fn deref(&self) -> &Self::Target
216 {
217 return &self.item;
218 }
219}
220
221impl<'lock, ITEM: fmt::Debug + Send> DerefMut for ICoWLock<'lock, ITEM>
222{
223 fn deref_mut(&mut self) -> &mut Self::Target
224 {
225 return &mut self.item
226 }
227}
228
/// A read snapshot of an [`ICoW`] value.
///
/// Holds its own `Arc` to the value, so it stays valid even after later
/// commits replace the shared value.
#[derive(Debug)]
pub struct ICoWRead<'read, ITEM: fmt::Debug + Send>
{
    // Shared handle to the value as it was at snapshot time.
    item: Arc<ITEM>,

    // Ties the snapshot's lifetime to the owning ICoW instance.
    bind: &'read ICoW<ITEM>
}
242
243impl<'read, ITEM: fmt::Debug + Send> Deref for ICoWRead<'read, ITEM>
244{
245 type Target = ITEM;
246
247 fn deref(&self) -> &Self::Target
248 {
249 return &self.item;
250 }
251}
252
#[cfg(test)]
impl<'read, ITEM: fmt::Debug + Send> ICoWRead<'read, ITEM>
{
    /// Test helper: a weak handle used to observe when the snapshot's
    /// allocation is actually released.
    fn weak(&self) -> Weak<ITEM>
    {
        Arc::downgrade(&self.item)
    }
}
261
/// Heap node stored behind the [`ICoW`] atomic pointer.
///
/// Wraps the value in an `Arc` so read snapshots can outlive a swap.
#[derive(Debug)]
struct ICoWInternal<ITEM: fmt::Debug + Send>
{
    item: Arc<ITEM>
}
269
270
/// An atomically swappable copy-on-write cell.
///
/// The pointer is never dangling: it is either a live `ICoWInternal`
/// allocation or null, where null encodes "an exclusive lock is
/// pending".
#[derive(Debug)]
pub struct ICoW<ITEM: fmt::Debug + Send>
{
    inner: AtomicPtr<ICoWInternal<ITEM>>
}
311
312impl<ITEM: fmt::Debug + Send> Drop for ICoW<ITEM>
313{
314 fn drop(&mut self)
315 {
316 let Err(e) =
317 self
318 .inner
319 .compare_exchange_weak(
320 ptr::null_mut(),
321 ptr::null_mut(),
322 Ordering::SeqCst,
323 Ordering::Acquire
324 )
325 else
326 {
327 return;
328 };
329
330 let mut prev_val = e;
331
332 let bkoff = Backoff::new();
333
334 loop
335 {
336 let res =
337 self
338 .inner
339 .compare_exchange_weak(
340 prev_val,
341 ptr::null_mut(),
342 Ordering::SeqCst,
343 Ordering::Acquire
344 );
345
346 if let Ok(val) = res
347 {
348 drop(unsafe {Box::from_raw(val)});
349
350 return;
351 }
352
353 prev_val = res.err().unwrap();
357
358 if bkoff.is_completed() == false
359 {
360 bkoff.snooze();
361 }
362 else
363 {
364 let prev_val = self.inner.swap(ptr::null_mut(), Ordering::Relaxed);
366
367 if prev_val.is_null() == false
368 {
369 drop(unsafe {Box::from_raw(prev_val)});
370
371 return;
372 }
373 }
374 }
375
376 }
377}
378
379impl<ITEM: fmt::Debug + Send> ICoW<ITEM>
380{
381 pub
383 fn new(item: ITEM) -> Self
384 {
385 let arc = Arc::new(item);
386
387 let inner = NonNull::new(Box::leak(Box::new(ICoWInternal{ item: arc }))).unwrap();
388
389 return Self{ inner: AtomicPtr::new(inner.as_ptr())}
390 }
391
392 pub
394 fn get_lock_type() -> ICoWLockTypes
395 {
396 return ICoWLockTypes::Atomic;
397 }
398}
399
400impl<ITEM: fmt::Debug + Send> ICoW<ITEM>
401{
402 pub
410 fn read(&self) -> ICoWRead<'_, ITEM>
411 {
412 let bkoff = Backoff::new();
413
414 loop
415 {
416 let res =
417 self
418 .inner
419 .compare_exchange_weak(
420 ptr::null_mut(),
421 ptr::null_mut(),
422 Ordering::SeqCst,
423 Ordering::Acquire
424 );
425
426 if let Err(val) = res
427 {
428 let clonned_val = unsafe { val.as_ref().unwrap() }.item.clone();
429
430 return ICoWRead{ item: clonned_val, bind: self };
431 }
432 else
433 {
434 if bkoff.is_completed() == false
435 {
436 bkoff.snooze();
437 }
438 else
439 {
440 bkoff.snooze();
442 }
443
444 continue;
445 }
446 }
447 }
448
449 pub
461 fn try_read(&self) -> Option<ICoWRead<'_, ITEM>>
462 {
463 let bkoff = Backoff::new();
464
465 loop
466 {
467 let res =
468 self
469 .inner
470 .compare_exchange_weak(
471 ptr::null_mut(),
472 ptr::null_mut(),
473 Ordering::SeqCst,
474 Ordering::Acquire
475 );
476
477 if let Err(val) = res
478 {
479 let clonned_val = unsafe { val.as_ref().unwrap() }.item.clone();
480
481 return Some(ICoWRead{ item: clonned_val, bind: self });
482 }
483 else
484 {
485 if bkoff.is_completed() == false
486 {
487 bkoff.snooze();
488 }
489 else
490 {
491 return None;
492 }
493
494 continue;
495 }
496 }
497 }
498
499
500 pub
511 fn try_new_exclusivly(&self, new_item: ITEM) -> Option<ICoWLock<'_, ITEM>>
512 {
513
514 let cur = self.inner.load(Ordering::Relaxed);
515
516 let res =
517 self
518 .inner
519 .compare_exchange_weak(cur, ptr::null_mut(), Ordering::SeqCst, Ordering::Acquire);
520
521 if let Ok(ptr) = res
522 {
523 if ptr.is_null() == true
524 {
525 return None;
527 }
528
529 let prev_val = unsafe { Box::from_raw(ptr)};
530
531 let ret = ICoWLock{ prev_item: prev_val, item: new_item, inst: self };
532
533 return Some(ret);
534 }
535 else
536 {
537 return None;
539 }
540 }
541
542 pub
557 fn try_new_inplace(&self, new_item: ITEM) -> Result<(), (ICoWError, ITEM)>
558 {
559 let read = self.try_read();
560
561 if let Some(read_res) = read
562 {
563 let ret =
564 ICoWCopy
565 {
566 read: read_res,
567 new_item: new_item,
568 bind: self
569 };
570
571 return ret.commit().map_err(|e| (e.0, e.1.new_item));
572 }
573 else
574 {
575 return Err((ICoWError::ExclusiveLockPending, new_item));
576
577 }
578 }
579
580
581 pub
601 fn try_new_exclusivly_inplace(&self, new_item: ITEM) -> Result<(), (ICoWError, ITEM)>
602 {
603
604 let cur = self.inner.load(Ordering::Relaxed);
605
606 let res =
607 self
608 .inner
609 .compare_exchange_weak(cur, ptr::null_mut(), Ordering::SeqCst, Ordering::Acquire);
610
611 if let Ok(ptr) = res
612 {
613 if ptr.is_null() == true
614 {
615 return Err((ICoWError::ExclusiveLockPending, new_item));
617 }
618
619 let prev_val = unsafe { Box::from_raw(ptr)};
620
621 let ret = ICoWLock{ prev_item: prev_val, item: new_item, inst: self };
622
623 return ret.commit().map_err(|e| (ICoWError::RaceCondition, e.item));
624 }
625 else
626 {
627 return Err((ICoWError::AlreadyUpdated, new_item));
629 }
630 }
631}
632
633impl<ITEM: fmt::Debug + Send + Copy> ICoW<ITEM>
634{
635 pub
645 fn try_copy(&self) -> Option<ICoWCopy<'_, ITEM>>
646 {
647 let read = self.try_read()?;
648
649 let new_item = *read.item.as_ref();
650
651 let ret =
652 ICoWCopy
653 {
654 read: read,
655 new_item: new_item,
656 bind: self
657 };
658
659 return Some(ret);
660 }
661
662 pub
673 fn try_copy_exclusivly(&self) -> Option<ICoWLock<'_, ITEM>>
674 {
675
676 let cur = self.inner.load(Ordering::Relaxed);
677
678 let res =
679 self
680 .inner
681 .compare_exchange_weak(cur, ptr::null_mut(), Ordering::SeqCst, Ordering::Acquire);
682
683 if let Ok(ptr) = res
684 {
685 if ptr.is_null() == true
686 {
687 return None;
689 }
690
691 let prev_val = unsafe { Box::from_raw(ptr)};
692 let new_val = *prev_val.item.as_ref();
693
694 let ret = ICoWLock{ prev_item: prev_val, item: new_val, inst: self };
695
696 return Some(ret);
697 }
698 else
699 {
700 return None;
702 }
703 }
704}
705
706impl<ITEM: fmt::Debug + Send + Clone> ICoW<ITEM>
707{
708 pub
718 fn try_clone(&self) -> Option<ICoWCopy<'_, ITEM>>
719 {
720 let read = self.try_read()?;
721
722 let new_item = read.item.as_ref().clone();
723
724 let ret =
725 ICoWCopy
726 {
727 read: read,
728 new_item: new_item,
729 bind: self
730 };
731
732 return Some(ret);
733 }
734
735 pub
746 fn try_clone_exclusivly(&self) -> Option<ICoWLock<'_, ITEM>>
747 {
748
749 let cur = self.inner.load(Ordering::Relaxed);
750
751 let res =
752 self
753 .inner
754 .compare_exchange_weak(cur, ptr::null_mut(), Ordering::SeqCst, Ordering::Acquire);
755
756 if let Ok(ptr) = res
757 {
758 if ptr.is_null() == true
759 {
760 return None;
762 }
763
764 let prev_val = unsafe { Box::from_raw(ptr)};
765 let new_val = prev_val.item.as_ref().clone();
766
767 let ret = ICoWLock{ prev_item: prev_val, item: new_val, inst: self };
768
769 return Some(ret);
770 }
771 else
772 {
773 return None;
775 }
776 }
777}
778
779impl<ITEM: fmt::Debug + Send + Default> ICoW<ITEM>
780{
781 pub
791 fn try_default(&self) -> Option<ICoWCopy<'_, ITEM>>
792 {
793 let read = self.try_read()?;
794
795 let new_item = ITEM::default();
796
797 let ret =
798 ICoWCopy
799 {
800 read: read,
801 new_item: new_item,
802 bind: self
803 };
804
805 return Some(ret);
806 }
807
808
809 pub
820 fn try_default_exclusivly(&self) -> Option<ICoWLock<'_, ITEM>>
821 {
822
823 let cur = self.inner.load(Ordering::Relaxed);
824
825 let res =
826 self
827 .inner
828 .compare_exchange_weak(cur, ptr::null_mut(), Ordering::SeqCst, Ordering::Acquire);
829
830 if let Ok(ptr) = res
831 {
832 if ptr.is_null() == true
833 {
834 return None;
836 }
837
838 let prev_val = unsafe { Box::from_raw(ptr)};
839 let new_val = ITEM::default();
840
841 let ret = ICoWLock{ prev_item: prev_val, item: new_val, inst: self };
842
843 return Some(ret);
844 }
845 else
846 {
847 return None;
849 }
850 }
851}
852
853#[cfg(test)]
854mod test
855{
856 use std::{sync::{mpsc, LazyLock, Mutex}, time::{Duration, Instant}};
857
858 use super::*;
859
860
861
    // Micro-benchmark: compares the latency of Weak::upgrade against
    // Mutex::lock. Timings are printed, not asserted.
    #[test]
    fn test_2()
    {
        // NOTE(review): Test1 is declared but never used.
        struct Test1
        {
            s: String
        }

        let a = Arc::new(4);
        let w = Arc::downgrade(&a);

        for _ in 0..10
        {
            let s = Instant::now();

            let arc = w.upgrade();

            let e = s.elapsed();

            assert_eq!(arc.is_some(), true);

            println!("{:?}", e);
        }

        println!("--------");


        let mutex = Mutex::new(5);

        for _ in 0..10
        {
            let s = Instant::now();

            let arc = mutex.lock();

            let e = s.elapsed();

            assert_eq!(arc.is_ok(), true);

            println!("{:?}", e);
        }
    }
904
    // Smoke test: measures and prints the latency of taking read
    // snapshots; no assertions beyond not panicking.
    #[test]
    fn test_3()
    {
        #[derive(Debug, Clone)]
        struct Test { s: String };

        let icow = ICoW::new(Test{ s: "test".into() });

        for _ in 0..10
        {
            let s = Instant::now();

            let read0 = icow.read();

            let e = s.elapsed();

            println!("{:?}", e);
        }

    }
925
    // Verifies mutual exclusion: while an exclusive lock is held, both a
    // second exclusive lock and a CoW clone must fail.
    // NOTE(review): the lock is dropped without commit, so the ICoW stays
    // "locked" (null pointer) when it is dropped — relies on Drop
    // handling the null case.
    #[test]
    fn test_33()
    {
        #[derive(Debug, Clone)]
        struct Test { s: String };

        let icow = ICoW::new(Test{ s: "test".into() });

        let write_ex = icow.try_clone_exclusivly().unwrap();

        let write_ex_err = icow.try_clone_exclusivly();
        assert_eq!(write_ex_err.is_none(), true);

        let write_ex_err = icow.try_clone();
        assert_eq!(write_ex_err.is_none(), true);

        drop(write_ex);

        drop(icow);

    }
947
    // Single-threaded snapshot-isolation check: commits via both the
    // exclusive-lock path and the CoW path; earlier snapshots must keep
    // observing their original values.
    #[test]
    fn test_4()
    {
        #[derive(Debug, Clone)]
        struct Test { s: u32 }

        let icow = ICoW::new(Test{ s: 1 });

        let read0 = icow.read();
        let read1 = icow.read();

        let mut excl_write = icow.try_clone_exclusivly().unwrap();

        excl_write.item.s = 5;


        excl_write.commit().unwrap();

        // Snapshot taken before the commit still sees the old value.
        assert_eq!(read0.item.s, 1);

        let read3 = icow.read();
        assert_eq!(read3.item.s, 5);

        let mut writing = icow.try_clone().unwrap();
        writing.s = 4;

        writing.commit().unwrap();

        assert_eq!(read0.item.s, 1);
        assert_eq!(read3.item.s, 5);

        let read4 = icow.read();
        assert_eq!(read4.s, 4);
    }
982
    // Cross-thread commit plus Arc lifetime check: a reader blocked by a
    // pending exclusive lock must observe the committed value, and weak
    // handles must die only when the last snapshot of a version drops.
    #[test]
    fn test_5()
    {
        #[derive(Debug, Clone)]
        struct Test { s: u32 }

        let icow = Arc::new(ICoW::new(Test{ s: 1 }));

        let read0 = icow.read();
        let read2 = icow.read();

        let c_icow = icow.clone();

        let (se, rc) = mpsc::channel::<()>();
        let handler0 =
            std::thread::spawn(move ||
                {
                    let mut lock0 = c_icow.try_clone_exclusivly().unwrap();

                    // Signal the main thread once the lock is held.
                    se.send(()).unwrap();

                    lock0.item.s = 5;

                    std::thread::sleep(Duration::from_micros(2));

                    lock0.commit().unwrap();
                }
            );

        rc.recv().unwrap();

        // This read spins until the commit above releases the lock.
        let s = Instant::now();

        let read1 = icow.read();

        let e = s.elapsed();

        println!("{:?}", e);


        assert_eq!(read1.item.s, 5);
        assert_eq!(read0.item.s, 1);

        handler0.join().unwrap();

        let weak0 = read0.weak();
        let weak1 = read1.weak();

        drop(read0);
        drop(read1);

        // read2 still holds the old version; the new version's Arc is
        // kept alive by the ICoW itself.
        assert_eq!(weak0.upgrade().is_some(), true);
        assert_eq!(weak1.upgrade().is_some(), true);

        drop(read2);
        assert_eq!(weak0.upgrade().is_none(), true);
        assert_eq!(weak1.upgrade().is_some(), true);
    }
1041
1042
    // Reader/writer race: the main thread polls `read` until it observes
    // the value committed by the other thread via the exclusive path.
    #[test]
    fn test_6()
    {
        #[derive(Debug, Clone)]
        struct Test { s: u32 }

        let icow = Arc::new(ICoW::new(Test{ s: 1 }));

        let read0 = icow.read();
        let read2 = icow.read();

        let c_icow = icow.clone();

        let (se, rc) = mpsc::channel::<()>();
        let handler0 =
            std::thread::spawn(move ||
                {
                    let read2 = c_icow.read();

                    let mut lock0 = c_icow.try_clone_exclusivly().unwrap();

                    se.send(()).unwrap();

                    lock0.item.s = 5;

                    std::thread::sleep(Duration::from_nanos(50));
                    lock0.commit().unwrap();

                    let read3 = c_icow.read();

                    // Snapshot isolation inside the writer thread.
                    assert_eq!(read2.item.s, 1);
                    assert_eq!(read3.item.s, 5);
                }
            );

        rc.recv().unwrap();

        // Poll (bounded) until the commit becomes visible.
        for _ in 0..100000000
        {
            let read1 = icow.read();

            if read1.item.s == 1
            {
                continue;
            }
            else
            {
                break;
            }
        }

        let read1 = icow.read();
        assert_eq!(read1.item.s, 5);

        handler0.join().unwrap();

        return;
    }
1101
1102
    // Ping-pong stress test on a static ICoW: two threads alternate
    // exclusive commits (1 <-> 5) for 1000 rounds, coordinating via
    // channels, asserting snapshot isolation each round.
    #[test]
    fn test_7()
    {
        #[derive(Debug, Clone)]
        struct TestStruct { s: u32 }

        impl TestStruct
        {
            fn new(s: u32) -> Self
            {
                return Self{ s: s };
            }
        }

        static VAL: LazyLock<ICoW<TestStruct>> =
            LazyLock::new(|| ICoW::new(TestStruct::new(1)));




        let borrow1 = VAL.read();
        assert_eq!(borrow1.item.s, 1);

        let (mpsc_send, mpsc_rcv) = mpsc::channel::<u64>();
        let (mpsc_send2, mpsc_rcv2) = mpsc::channel::<u64>();

        let thread1 =
            std::thread::spawn(move ||
                {
                    for _ in 0..1000
                    {
                        // Wait for the main thread's turn signal.
                        let _ = mpsc_rcv2.recv();
                        let borrow1 = VAL.read();

                        let mut transaction = VAL.try_clone_exclusivly().unwrap();

                        transaction.item.s = 5;



                        std::thread::sleep(Duration::from_nanos(1001));
                        transaction.commit().unwrap();

                        let borrow2 = VAL.read();


                        // Pre-commit snapshot vs post-commit read.
                        assert_eq!(borrow1.item.s, 1);

                        assert_eq!(borrow2.item.s, 5);

                        let _ = mpsc_send.send(1);
                    }
                }
            );



        for x in 0..1000
        {
            println!("{}", x);
            mpsc_send2.send(1).unwrap();
            let _ = mpsc_rcv.recv();

            let borrow1 = VAL.read();
            assert_eq!(borrow1.item.s, 5);

            // Flip the value back for the next round.
            let mut transaction = VAL.try_clone_exclusivly().unwrap();
            transaction.item.s = 1;
            transaction.commit().unwrap();

            let borrow1 = VAL.read();
            assert_eq!(borrow1.item.s, 1);

        }



        thread1.join().unwrap();


    }
1184
1185
    // Repeated reader/writer race using the non-exclusive CoW commit
    // path (`try_clone` + `commit`), 20 rounds; the main thread polls
    // until the committed value becomes visible, then resets it.
    #[test]
    fn test_8()
    {
        #[derive(Debug, Clone)]
        struct Test { s: u32 }

        let icow = Arc::new(ICoW::new(Test{ s: 1 }));

        for _ in 0..20
        {
            let read0 = icow.read();
            let read2 = icow.read();

            let c_icow = icow.clone();

            let handler0 =
                std::thread::spawn(move ||
                    {
                        let read2 = c_icow.read();

                        let mut lock0 = c_icow.try_clone().unwrap();

                        lock0.s = 5;

                        std::thread::sleep(Duration::from_micros(1));
                        lock0.commit().unwrap();

                        let read3 = c_icow.read();

                        // Snapshot isolation inside the writer thread.
                        assert_eq!(read2.item.s, 1);
                        assert_eq!(read3.item.s, 5);
                    }
                );

            // Poll (bounded) until the commit becomes visible.
            for i in 0..1000000000
            {
                let read1 = icow.read();

                if read1.item.s == 1
                {
                    continue;
                }
                else
                {
                    println!("{}", i);
                    break;
                }
            }

            let read1 = icow.read();
            assert_eq!(read1.item.s, 5);

            handler0.join().unwrap();

            // Reset for the next round.
            let mut lock0 = icow.try_clone().unwrap();
            lock0.s = 1;
            lock0.commit().unwrap();
        }

        return;
    }
1252}