1use std::{cell::UnsafeCell, future::Future, mem::MaybeUninit, ops::{Deref, DerefMut}, pin::Pin, ptr::NonNull, sync::atomic::{AtomicUsize, Ordering}, task::Waker};
2use futures::{future::FusedFuture, task::noop_waker};
3
/// Heap block shared by every handle (`Unique`, `Share*`, `Host*`).
///
/// `count` is the number of live handles. The value `0` doubles as a
/// transient "locked" state taken by `poll` (while swapping the waker) and by
/// the 2→1 hand-off in the `Drop` impls, so observers spin while it reads 0.
/// `waker` starts uninitialized; it is armed (with a noop waker) when the
/// value is first shared and consumed by the 2→1 hand-off.
struct Inner<T: ?Sized> {
    count: AtomicUsize,
    waker: UnsafeCell<MaybeUninit<Waker>>,
    data: UnsafeCell<T>
}
9
/// Exclusive owner of the `Inner` allocation — the only handle alive
/// (`count == 1`); grants `&T`/`&mut T` via `Deref`/`DerefMut`.
pub struct Unique<T: ?Sized>(*const Inner<T>);

// SAFETY: `Unique` is the sole handle, so sending it transfers exclusive
// access to `T`; `T: Send` is therefore sufficient.
unsafe impl<T: ?Sized + Send> Send for Unique<T> {}

// SAFETY: `&Unique` only hands out `&T` (via `Deref`), so `T: Sync` suffices.
unsafe impl<T: ?Sized + Sync> Sync for Unique<T> {}
17
impl<T> Unique<T> {
    /// Allocates a fresh `Inner` with a handle count of 1 and an
    /// uninitialized waker slot (the slot is only armed once shared).
    pub fn new(t: T) -> Self {
        Unique(Box::into_raw(Box::new(Inner {
            count: 1.into(),
            waker: UnsafeCell::new(MaybeUninit::uninit()),
            data: UnsafeCell::new(t)
        })))
    }

    /// Consumes the handle and returns the wrapped value, freeing the
    /// allocation.
    pub fn into_inner(self) -> T {
        // SAFETY: `Unique` is the only handle, so we own the allocation made
        // by `Box::into_raw` in `new`. `forget(self)` below prevents the
        // `Drop` impl from freeing it a second time.
        let t = unsafe { Box::from_raw(self.0 as *mut Inner<T>) }.data.into_inner();
        std::mem::forget(self);
        t
    }
}
33
impl<T: ?Sized> Unique<T> {
    /// Pins the handle.
    pub fn pin(unique: Self) -> Pin<Self> {
        // SAFETY: the pointee lives on the heap behind a stable pointer and
        // `Unique` never relocates it.
        unsafe { Pin::new_unchecked(unique) }
    }

    /// Splits into a read-only pair: a `Host` future that resolves back to
    /// `Unique` once all shares are gone, plus one `Share`.
    /// Sets the count to 2 and arms the waker slot with a noop waker.
    pub fn share(self) -> (Host<T>, Share<T>) {
        // SAFETY: `Unique` owns the allocation exclusively, so the `&mut
        // Inner` cannot alias.
        let inner = unsafe { (self.0 as *mut Inner<T>).as_mut().unwrap_unchecked() };
        *inner.count.get_mut() = 2;
        // NOTE(review): `write` overwrites without dropping; if the slot
        // could still be initialized from a previous share cycle the old
        // waker would leak — TODO confirm the slot is always consumed first.
        inner.waker.get_mut().write(noop_waker());
        let host = Host(Some(unsafe { NonNull::new_unchecked(self.0 as *mut Inner<T>) }));
        let share = Share(self.0);
        // The two new handles own the allocation now; skip `Unique::drop`.
        std::mem::forget(self);
        (host, share)
    }

    /// Splits into a mutable pair (`HostMut`, `ShareMut`); same protocol as
    /// [`Self::share`].
    pub fn share_mut(self) -> (HostMut<T>, ShareMut<T>) {
        // SAFETY: as in `share` — exclusive owner, no aliasing.
        let inner = unsafe { (self.0 as *mut Inner<T>).as_mut().unwrap_unchecked() };
        *inner.count.get_mut() = 2;
        inner.waker.get_mut().write(noop_waker());
        let host = HostMut(Some(unsafe { NonNull::new_unchecked(self.0 as *mut Inner<T>) }));
        let share = ShareMut(self.0 as *mut Inner<T>);
        std::mem::forget(self);
        (host, share)
    }

    /// Pinned variant of [`Self::share`]; the pointee never moves, so
    /// unwrapping the `Pin` here is sound.
    pub fn share_pinned(pin: Pin<Self>) -> (Host<T>, Share<T>) {
        // SAFETY: the data stays pinned behind the new handles.
        let this = unsafe { Pin::into_inner_unchecked(pin) };
        // SAFETY: as in `share` — exclusive owner, no aliasing.
        let inner = unsafe { (this.0 as *mut Inner<T>).as_mut().unwrap_unchecked() };
        *inner.count.get_mut() = 2;
        inner.waker.get_mut().write(noop_waker());
        let host = Host(Some(unsafe { NonNull::new_unchecked(this.0 as *mut Inner<T>) }));
        let share = Share(this.0);
        std::mem::forget(this);
        (host, share)
    }

    /// Pinned variant of [`Self::share_mut`].
    pub fn share_pinned_mut(pin: Pin<Self>) -> (HostMut<T>, ShareMut<T>) {
        // SAFETY: the data stays pinned behind the new handles.
        let this = unsafe { Pin::into_inner_unchecked(pin) };
        // SAFETY: as in `share` — exclusive owner, no aliasing.
        let inner = unsafe { (this.0 as *mut Inner<T>).as_mut().unwrap_unchecked() };
        *inner.count.get_mut() = 2;
        inner.waker.get_mut().write(noop_waker());
        let host = HostMut(Some(unsafe { NonNull::new_unchecked(this.0 as *mut Inner<T>) }));
        let share = ShareMut(this.0 as *mut Inner<T>);
        std::mem::forget(this);
        (host, share)
    }
}
81
82impl<T: ?Sized> Deref for Unique<T> {
83 type Target = T;
84
85 fn deref(&self) -> &Self::Target {
86 unsafe { self.0.as_ref().unwrap_unchecked().data.get().as_ref().unwrap_unchecked() }
87 }
88}
89
90impl<T: ?Sized> DerefMut for Unique<T> {
91 fn deref_mut(&mut self) -> &mut Self::Target {
92 unsafe { (self.0 as *mut Inner<T>).as_mut().unwrap_unchecked() }.data.get_mut()
93 }
94}
95
impl<T: ?Sized> Drop for Unique<T> {
    fn drop(&mut self) {
        // SAFETY: `Unique` is the only live handle, so it owns the
        // allocation created by `Box::into_raw` in `new`. The waker slot is
        // `MaybeUninit`, so no waker destructor runs here.
        drop(unsafe { Box::from_raw(self.0 as *mut Inner<T>) })
    }
}
101
/// Shared, read-only handle; `Clone` bumps the count, `Drop` decrements it
/// and performs the 2→1 wake hand-off to the waiting host.
pub struct Share<T: ?Sized>(*const Inner<T>);

// SAFETY: `Share` hands out `&T` from several threads and the last handle to
// drop frees `T`, so both `T: Send` and `T: Sync` are required.
unsafe impl<T: ?Sized + Send + Sync> Send for Share<T> {}

unsafe impl<T: ?Sized + Send + Sync> Sync for Share<T> {}
109
110impl<T: ?Sized> Deref for Share<T> {
111 type Target = T;
112
113 fn deref(&self) -> &Self::Target {
114 unsafe { self.0.as_ref().unwrap_unchecked().data.get().as_ref().unwrap_unchecked() }
115 }
116}
117
118impl<T: ?Sized> Clone for Share<T> {
119 fn clone(&self) -> Self {
120 let count = &unsafe { self.0.as_ref().unwrap_unchecked() }.count;
121 loop {
122 match count.load(Ordering::Relaxed) {
123 0 => continue, n => match count.compare_exchange_weak(n, n+1, Ordering::Relaxed, Ordering::Relaxed) {
125 Ok(_) => break,
126 Err(_) => continue,
127 }
128 }
129 #[allow(unreachable_code)]
130 {
131 unreachable!()
132 }
133 }
134 Share(self.0)
135 }
136}
137
138impl<T: ?Sized> Drop for Share<T> {
139 fn drop(&mut self) {
140 let inner = unsafe { self.0.as_ref().unwrap_unchecked() };
141 loop {
142 match inner.count.load(Ordering::Relaxed) {
143 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
145 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
146 Ok(_) => {
147 unsafe { inner
148 .waker
149 .get()
150 .as_mut()
151 .unwrap_unchecked()
152 .assume_init_read()
153 .wake();
154 };
155 inner.count.store(1, Ordering::Release);
156 break;
157 },
158 Err(_) => continue,
159 },
160 n => match inner.count.compare_exchange_weak(n, n-1, Ordering::Release, Ordering::Relaxed) {
161 Ok(_) => break,
162 Err(_) => continue,
163 }
164 }
165 #[allow(unreachable_code)]
166 {
167 unreachable!()
168 }
169 }
170 }
171}
172
/// Shared handle with mutable access; never cloned, so the count for a
/// mutable pair is at most 2.
pub struct ShareMut<T: ?Sized>(*mut Inner<T>);

// SAFETY: `ShareMut` is the only handle that touches `data` while the pair
// is live (the paired `HostMut` only reads the counter), so `T: Send` is
// enough to move it across threads.
unsafe impl<T: ?Sized + Send> Send for ShareMut<T> {}

// SAFETY: `&ShareMut` hands out `&T`, so `T: Sync` suffices.
unsafe impl<T: ?Sized + Sync> Sync for ShareMut<T> {}
178
179impl<T: ?Sized> ShareMut<T> {
180 pub fn into_share(self) -> Share<T> {
181 let share = Share(self.0);
182 std::mem::forget(self);
183 share
184 }
185
186 pub fn pinned_into_share(pin: Pin<Self>) -> Share<T> {
187 unsafe { Pin::into_inner_unchecked(pin) }.into_share()
188 }
189}
190
191impl<T: ?Sized> Deref for ShareMut<T> {
192 type Target = T;
193
194 fn deref(&self) -> &Self::Target {
195 unsafe { self.0.as_ref().unwrap_unchecked().data.get().as_ref().unwrap_unchecked() }
196 }
197}
198
199impl<T: ?Sized> DerefMut for ShareMut<T> {
200 fn deref_mut(&mut self) -> &mut Self::Target {
201 unsafe { self.0.as_ref().unwrap_unchecked().data.get().as_mut().unwrap_unchecked() }
202 }
203}
204
205impl<T: ?Sized> Drop for ShareMut<T> {
206 fn drop(&mut self) {
207 let inner = unsafe { self.0.as_ref().unwrap_unchecked() };
208 loop {
209 match inner.count.load(Ordering::Relaxed) {
210 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
212 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
213 Ok(_) => {
214 unsafe { inner
215 .waker
216 .get()
217 .as_mut()
218 .unwrap_unchecked()
219 .assume_init_read()
220 .wake();
221 };
222 inner.count.store(1, Ordering::Release);
223 break;
224 },
225 Err(_) => continue,
226 },
227 _ => unreachable!()
228 }
229 #[allow(unreachable_code)]
230 {
231 unreachable!()
232 }
233 }
234 }
235}
236
/// Future half of a read-only split: resolves to `Unique` once every
/// `Share` is dropped. `None` marks a completed future.
pub struct Host<T: ?Sized>(Option<NonNull<Inner<T>>>);

// SAFETY: `Host` reads `&T` (`get`) while shares do the same elsewhere, and
// on completion it takes ownership of `T`; hence `T: Send + Sync`.
unsafe impl<T: ?Sized + Send + Sync> Send for Host<T> {}

unsafe impl<T: ?Sized + Send + Sync> Sync for Host<T> {}
244
impl<T: ?Sized> Host<T> {
    /// Current handle count (including this host), or `None` once the
    /// future has completed.
    pub fn count_checked(&self) -> Option<usize> {
        Some(unsafe { self.0.as_ref()?.as_ref().count.load(Ordering::Relaxed) })
    }

    /// Current handle count; panics after completion.
    pub fn count(&self) -> usize {
        self.count_checked().unwrap()
    }

    /// Borrows the shared value, or `None` once the future has completed.
    pub fn get_checked(&self) -> Option<&T> {
        Some(unsafe { self.0.as_ref()?.as_ref().data.get().as_ref().unwrap_unchecked() })
    }

    /// Borrows the shared value; panics after completion.
    pub fn get(&self) -> &T {
        self.get_checked().unwrap()
    }

    /// Mints an additional `Share`, or `None` once the future has
    /// completed. Spins while the count reads 0 (a transition is in
    /// flight), then CAS-increments it. The trailing `unreachable!()` block
    /// is dead code: every match arm either breaks or continues.
    pub fn share_checked(&self) -> Option<Share<T>> {
        let count = &unsafe { self.0.as_ref()?.as_ref() }.count;
        loop {
            match count.load(Ordering::Relaxed) {
                0 => continue, n => match count.compare_exchange_weak(n, n+1, Ordering::Relaxed, Ordering::Relaxed) {
                    Ok(_) => break,
                    Err(_) => continue,
                }
            }
            #[allow(unreachable_code)]
            {
                unreachable!()
            }
        }
        Some(Share(unsafe { self.0.as_ref().unwrap_unchecked().as_ptr() }))
    }

    /// Mints an additional `Share`; panics after completion.
    pub fn share(&self) -> Share<T> {
        self.share_checked().unwrap()
    }

    /// Re-tags this host as a `HostMut` without touching the count; the
    /// destructor is suppressed because the handle lives on.
    pub fn into_host_mut(self) -> HostMut<T> {
        let host = HostMut(self.0);
        std::mem::forget(self);
        host
    }
}
290
291impl<T: ?Sized> Future for Host<T> {
292 type Output = Unique<T>;
293
294 fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
295 let inner = unsafe { self.0.as_ref().unwrap().as_ref() };
296 loop {
297 match inner.count.load(Ordering::Relaxed) {
298 0 => {
299 while inner.count.load(Ordering::Acquire) != 1 {}
300 break std::task::Poll::Ready(Unique(unsafe { self.0.take().unwrap_unchecked() }.as_ptr()));
301 }
302 1 => break std::task::Poll::Ready(Unique(unsafe { self.0.take().unwrap_unchecked() }.as_ptr())),
303 n => match inner.count.compare_exchange_weak(n, 0, Ordering::Acquire, Ordering::Relaxed) {
304 Ok(_) => {
305 let waker = unsafe { inner.waker.get().as_mut().unwrap_unchecked() };
306 if !unsafe{ waker.assume_init_ref() }.will_wake(cx.waker()) {
307 drop(unsafe { waker.assume_init_read() });
308 waker.write(cx.waker().clone());
309 }
310 inner.count.store(n, Ordering::Release);
311 break std::task::Poll::Pending;
312 },
313 Err(_) => continue,
314 }
315 }
316 #[allow(unreachable_code)]
317 {
318 unreachable!()
319 }
320 }
321 }
322}
323
324impl<T: ?Sized> FusedFuture for Host<T> {
325 fn is_terminated(&self) -> bool {
326 self.0.is_none()
327 }
328}
329
330impl<T: ?Sized> Drop for Host<T> {
331 fn drop(&mut self) {
332 let Some(inner) = (unsafe { self.0.as_ref().map(|ptr| ptr.as_ref()) }) else { return };
333 loop {
334 match inner.count.load(Ordering::Relaxed) {
335 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
337 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
338 Ok(_) => {
339 unsafe { inner
340 .waker
341 .get()
342 .as_mut()
343 .unwrap_unchecked()
344 .assume_init_read()
345 .wake();
346 };
347 inner.count.store(1, Ordering::Release);
348 break;
349 },
350 Err(_) => continue,
351 },
352 n => match inner.count.compare_exchange_weak(n, n-1, Ordering::Release, Ordering::Relaxed) {
353 Ok(_) => break,
354 Err(_) => continue,
355 }
356 }
357 #[allow(unreachable_code)]
358 {
359 unreachable!()
360 }
361 }
362 }
363}
364
/// Future half of a mutable split: resolves to `Unique` once the paired
/// `ShareMut` is dropped. `None` marks a completed future.
pub struct HostMut<T: ?Sized>(Option<NonNull<Inner<T>>>);

// SAFETY: while pending, `HostMut` only reads the counter; on completion it
// takes ownership of `T`, so `T: Send` is required.
unsafe impl<T: ?Sized + Send> Send for HostMut<T> {}

unsafe impl<T: ?Sized + Sync> Sync for HostMut<T> {}
370
371impl<T: ?Sized> HostMut<T> {
372 pub fn count_checked(&self) -> Option<usize> {
373 Some(unsafe { self.0.as_ref()?.as_ref().count.load(Ordering::Relaxed) })
374 }
375
376 pub fn count(&self) -> usize {
377 self.count_checked().unwrap()
378 }
379}
380
381impl<T: ?Sized> Future for HostMut<T> {
382 type Output = Unique<T>;
383
384 fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
385 let inner = unsafe { self.0.as_ref().unwrap().as_ref() };
386 loop {
387 match inner.count.load(Ordering::Relaxed) {
388 0 => {
389 while inner.count.load(Ordering::Acquire) != 1 {}
390 break std::task::Poll::Ready(Unique(unsafe { self.0.take().unwrap_unchecked() }.as_ptr()));
391 }
392 1 => break std::task::Poll::Ready(Unique(unsafe { self.0.take().unwrap_unchecked() }.as_ptr())),
393 n => match inner.count.compare_exchange_weak(n, 0, Ordering::Acquire, Ordering::Relaxed) {
394 Ok(_) => {
395 let waker = unsafe { inner.waker.get().as_mut().unwrap_unchecked() };
396 if !unsafe{ waker.assume_init_ref() }.will_wake(cx.waker()) {
397 drop(unsafe { waker.assume_init_read() });
398 waker.write(cx.waker().clone());
399 }
400 inner.count.store(n, Ordering::Release);
401 break std::task::Poll::Pending;
402 },
403 Err(_) => continue,
404 }
405 }
406 #[allow(unreachable_code)]
407 {
408 unreachable!()
409 }
410 }
411 }
412}
413
414impl<T: ?Sized> FusedFuture for HostMut<T> {
415 fn is_terminated(&self) -> bool {
416 self.0.is_none()
417 }
418}
419
420impl<T: ?Sized> Drop for HostMut<T> {
421 fn drop(&mut self) {
422 let Some(inner) = (unsafe { self.0.as_ref().map(|ptr| ptr.as_ref()) }) else { return };
423 loop {
424 match inner.count.load(Ordering::Relaxed) {
425 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
427 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
428 Ok(_) => {
429 unsafe { inner
430 .waker
431 .get()
432 .as_mut()
433 .unwrap_unchecked()
434 .assume_init_read()
435 .wake();
436 };
437 inner.count.store(1, Ordering::Release);
438 break;
439 },
440 Err(_) => continue,
441 },
442 n => match inner.count.compare_exchange_weak(n, n-1, Ordering::Release, Ordering::Relaxed) {
443 Ok(_) => break,
444 Err(_) => continue,
445 }
446 }
447 #[allow(unreachable_code)]
448 {
449 unreachable!()
450 }
451 }
452 }
453}
454
/// Pinned variant of `Host`: resolves to `Pin<Unique<T>>` so the pinning
/// promise made before sharing is carried through.
pub struct HostPinned<T: ?Sized>(Option<NonNull<Inner<T>>>);

// SAFETY: same situation as `Host` — shared reads while pending, ownership
// of `T` on completion — so `T: Send + Sync`.
unsafe impl<T: ?Sized + Send + Sync> Send for HostPinned<T> {}

unsafe impl<T: ?Sized + Send + Sync> Sync for HostPinned<T> {}
462
impl<T: ?Sized> HostPinned<T> {
    /// Re-tags an unpinned `Host` as pinned; the count is untouched, so the
    /// source's destructor is suppressed.
    pub fn from_unpinned(host: Host<T>) -> Self {
        let host_pinned = HostPinned(host.0);
        std::mem::forget(host);
        host_pinned
    }

    /// Current handle count, or `None` once the future has completed.
    pub fn count_checked(&self) -> Option<usize> {
        Some(unsafe { self.0.as_ref()?.as_ref().count.load(Ordering::Relaxed) })
    }

    /// Current handle count; panics after completion.
    pub fn count(&self) -> usize {
        self.count_checked().unwrap()
    }

    /// Borrows the shared value, or `None` once the future has completed.
    pub fn get_checked(&self) -> Option<&T> {
        Some(unsafe { self.0.as_ref()?.as_ref().data.get().as_ref().unwrap_unchecked() })
    }

    /// Borrows the shared value; panics after completion.
    pub fn get(&self) -> &T {
        self.get_checked().unwrap()
    }

    /// Mints an additional `Share`, or `None` once the future has
    /// completed. Spins while the count reads 0, then CAS-increments; the
    /// trailing `unreachable!()` block is dead code (every arm breaks or
    /// continues).
    pub fn share_checked(&self) -> Option<Share<T>> {
        let count = &unsafe { self.0.as_ref()?.as_ref() }.count;
        loop {
            match count.load(Ordering::Relaxed) {
                0 => continue, n => match count.compare_exchange_weak(n, n+1, Ordering::Relaxed, Ordering::Relaxed) {
                    Ok(_) => break,
                    Err(_) => continue,
                }
            }
            #[allow(unreachable_code)]
            {
                unreachable!()
            }
        }
        Some(Share(unsafe { self.0.as_ref().unwrap_unchecked().as_ptr() }))
    }

    /// Mints an additional `Share`; panics after completion.
    pub fn share(&self) -> Share<T> {
        self.share_checked().unwrap()
    }

    /// Re-tags this host as a `HostPinnedMut` without touching the count.
    /// NOTE(review): the name mirrors `Host::into_host_mut` although the
    /// result is the pinned variant.
    pub fn into_host_mut(self) -> HostPinnedMut<T> {
        let host = HostPinnedMut(self.0);
        std::mem::forget(self);
        host
    }
}
514
515impl<T: ?Sized> Future for HostPinned<T> {
516 type Output = Pin<Unique<T>>;
517
518 fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
519 let inner = unsafe { self.0.as_ref().unwrap().as_ref() };
520 loop {
521 match inner.count.load(Ordering::Relaxed) {
522 0 => {
523 while inner.count.load(Ordering::Acquire) != 1 {}
524 break std::task::Poll::Ready(unsafe { Pin::new_unchecked(Unique(self.0.take().unwrap_unchecked().as_ptr())) });
525 }
526 1 => break std::task::Poll::Ready(unsafe { Pin::new_unchecked(Unique(self.0.take().unwrap_unchecked().as_ptr())) }),
527 n => match inner.count.compare_exchange_weak(n, 0, Ordering::Acquire, Ordering::Relaxed) {
528 Ok(_) => {
529 let waker = unsafe { inner.waker.get().as_mut().unwrap_unchecked() };
530 if !unsafe{ waker.assume_init_ref() }.will_wake(cx.waker()) {
531 drop(unsafe { waker.assume_init_read() });
532 waker.write(cx.waker().clone());
533 }
534 inner.count.store(n, Ordering::Release);
535 break std::task::Poll::Pending;
536 },
537 Err(_) => continue,
538 }
539 }
540 #[allow(unreachable_code)]
541 {
542 unreachable!()
543 }
544 }
545 }
546}
547
548impl<T: ?Sized> FusedFuture for HostPinned<T> {
549 fn is_terminated(&self) -> bool {
550 self.0.is_none()
551 }
552}
553
554impl<T: ?Sized> Drop for HostPinned<T> {
555 fn drop(&mut self) {
556 let Some(inner) = (unsafe { self.0.as_ref().map(|ptr| ptr.as_ref()) }) else { return };
557 loop {
558 match inner.count.load(Ordering::Relaxed) {
559 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
561 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
562 Ok(_) => {
563 unsafe { inner
564 .waker
565 .get()
566 .as_mut()
567 .unwrap_unchecked()
568 .assume_init_read()
569 .wake();
570 };
571 inner.count.store(1, Ordering::Release);
572 break;
573 },
574 Err(_) => continue,
575 },
576 n => match inner.count.compare_exchange_weak(n, n-1, Ordering::Release, Ordering::Relaxed) {
577 Ok(_) => break,
578 Err(_) => continue,
579 }
580 }
581 #[allow(unreachable_code)]
582 {
583 unreachable!()
584 }
585 }
586 }
587}
588
/// Pinned variant of `HostMut`: resolves to `Pin<Unique<T>>`.
pub struct HostPinnedMut<T: ?Sized>(Option<NonNull<Inner<T>>>);

// SAFETY: same situation as `HostMut` — only the counter is read while
// pending; ownership of `T` arrives on completion, so `T: Send`.
unsafe impl<T: ?Sized + Send> Send for HostPinnedMut<T> {}

unsafe impl<T: ?Sized + Sync> Sync for HostPinnedMut<T> {}
594
595impl<T: ?Sized> HostPinnedMut<T> {
596 pub fn from_unpinned(host: HostMut<T>) -> Self {
597 let host_pinned = HostPinnedMut(host.0);
598 std::mem::forget(host);
599 host_pinned
600 }
601
602 pub fn count_checked(&self) -> Option<usize> {
603 Some(unsafe { self.0.as_ref()?.as_ref().count.load(Ordering::Relaxed) })
604 }
605
606 pub fn count(&self) -> usize {
607 self.count_checked().unwrap()
608 }
609}
610
611impl<T: ?Sized> Future for HostPinnedMut<T> {
612 type Output = Pin<Unique<T>>;
613
614 fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
615 let inner = unsafe { self.0.as_ref().unwrap().as_ref() };
616 loop {
617 match inner.count.load(Ordering::Relaxed) {
618 0 => {
619 while inner.count.load(Ordering::Acquire) != 1 {}
620 break std::task::Poll::Ready(unsafe { Pin::new_unchecked(Unique(self.0.take().unwrap_unchecked().as_ptr())) });
621 }
622 1 => break std::task::Poll::Ready(unsafe { Pin::new_unchecked(Unique(self.0.take().unwrap_unchecked().as_ptr())) }),
623 n => match inner.count.compare_exchange_weak(n, 0, Ordering::Acquire, Ordering::Relaxed) {
624 Ok(_) => {
625 let waker = unsafe { inner.waker.get().as_mut().unwrap_unchecked() };
626 if !unsafe{ waker.assume_init_ref() }.will_wake(cx.waker()) {
627 drop(unsafe { waker.assume_init_read() });
628 waker.write(cx.waker().clone());
629 }
630 inner.count.store(n, Ordering::Release);
631 break std::task::Poll::Pending;
632 },
633 Err(_) => continue,
634 }
635 }
636 #[allow(unreachable_code)]
637 {
638 unreachable!()
639 }
640 }
641 }
642}
643
644impl<T: ?Sized> FusedFuture for HostPinnedMut<T> {
645 fn is_terminated(&self) -> bool {
646 self.0.is_none()
647 }
648}
649
650impl<T: ?Sized> Drop for HostPinnedMut<T> {
651 fn drop(&mut self) {
652 let Some(inner) = (unsafe { self.0.as_ref().map(|ptr| ptr.as_ref()) }) else { return };
653 loop {
654 match inner.count.load(Ordering::Relaxed) {
655 0 => continue, 1 => break drop(unsafe { Box::from_raw(inner as *const Inner<T> as *mut Inner<T>) }),
657 2 => match inner.count.compare_exchange_weak(2, 0, Ordering::AcqRel, Ordering::Relaxed) {
658 Ok(_) => {
659 unsafe { inner
660 .waker
661 .get()
662 .as_mut()
663 .unwrap_unchecked()
664 .assume_init_read()
665 .wake();
666 };
667 inner.count.store(1, Ordering::Release);
668 break;
669 },
670 Err(_) => continue,
671 },
672 n => match inner.count.compare_exchange_weak(n, n-1, Ordering::Release, Ordering::Relaxed) {
673 Ok(_) => break,
674 Err(_) => continue,
675 }
676 }
677 #[allow(unreachable_code)]
678 {
679 unreachable!()
680 }
681 }
682 }
683}
684
#[cfg(test)]
mod test {
    use crate::Unique;

    /// Smoke test: a spawned task mutates through the `ShareMut` while the
    /// host awaits; once the share drops, the host resolves to a `Unique`
    /// that observes the increment.
    #[tokio::test]
    async fn vibe_check() {
        let unique = Unique::new(0);
        let (host, mut share) = unique.share_mut();
        tokio::task::spawn(async move {
            // Give the host time to park its waker before the share drops.
            tokio::time::sleep(std::time::Duration::from_millis(16)).await;
            *share += 1;
        });
        let unique = host.await;
        assert_eq!(unique.into_inner(), 1)
    }
}