1#![allow(dead_code)]
2
3use std::{
4 fmt::{self, Debug},
5 hash::Hash,
6 io,
7 mem::{self, ManuallyDrop},
8 ops::{Deref, DerefMut},
9 ptr,
10 task::Waker,
11};
12
13use compio_buf::{BufResult, IntoInner};
14use compio_send_wrapper::SendWrapper;
15use thin_cell::unsync::{Inner, Ref, ThinCell, Weak};
16
17use crate::{Carry, DriverType, Extra, OpCode, PushEntry, control::Carrier};
18
#[repr(C)]
/// Shared, reference-counted state of one submitted operation: the
/// driver-specific `extra` data, a cancellation flag, the eventual result
/// (or the waker to notify when it arrives), and the user's opcode wrapped
/// in a carrier.
///
/// `#[repr(C)]` fixes the field order — presumably so the driver can rely
/// on the offset of `extra` (see `from_optr`/`into_optr`); TODO confirm
/// against `Extra`'s layout.
pub(crate) struct RawOp<M: ?Sized> {
    // Driver-specific payload associated with the op.
    extra: Extra,
    // Whether `set_cancelled` has been called on this op.
    cancelled: bool,
    // `Pending(waker)` until completion, then `Ready(result)`.
    result: PushEntry<Option<Waker>, io::Result<usize>>,
    // Type-erased (`M = dyn Carry`) or concrete (`M = Carrier<T>`) opcode.
    // Must stay last: it is the potentially-unsized field.
    pub(crate) carrier: M,
}
39
40impl<C: ?Sized> RawOp<C> {
41 pub fn extra(&self) -> &Extra {
42 &self.extra
43 }
44
45 pub fn extra_mut(&mut self) -> &mut Extra {
46 &mut self.extra
47 }
48
49 #[cfg(io_uring)]
50 pub fn wake_by_ref(&mut self) {
51 if let PushEntry::Pending(Some(w)) = &self.result {
52 w.wake_by_ref();
53 }
54 }
55}
56
#[cfg(io_uring)]
impl<C: crate::Carry + ?Sized> RawOp<C> {
    /// Builds the submission entry for this operation and tags it with the
    /// op's `extra` data. `FALLBACK` selects the carrier's fallback entry
    /// construction path.
    pub fn create_entry<const FALLBACK: bool>(&mut self) -> crate::OpEntry {
        let entry = if FALLBACK {
            self.carrier.create_entry_fallback()
        } else {
            self.carrier.create_entry()
        };
        entry.with_extra(&self.extra)
    }
}
67
#[cfg(windows)]
impl<C: crate::Carry + ?Sized> RawOp<C> {
    /// Runs the operation synchronously on the current thread.
    ///
    /// Intended for opcodes that are not truly overlapped: `operate` must
    /// resolve immediately, so a `Pending` return is a bug (`unreachable!`).
    /// Panics from the opcode are converted into an `io::Error` by
    /// `catch_unwind_io`.
    pub fn operate_blocking(&mut self) -> io::Result<usize> {
        use std::{panic::AssertUnwindSafe, task::Poll};

        use crate::panic::catch_unwind_io;

        let optr = self.extra_mut().optr();
        // SAFETY: presumably `optr` points at this op's OVERLAPPED block
        // owned by `extra` — TODO confirm `Extra::optr`'s contract.
        catch_unwind_io(AssertUnwindSafe(|| unsafe {
            match self.carrier.operate(optr.cast()) {
                Poll::Pending => unreachable!("this operation is not overlapped"),
                Poll::Ready(res) => res,
            }
        }))
    }
}
88
89impl<C: ?Sized> Debug for RawOp<C> {
90 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
91 f.debug_struct("RawOp")
92 .field("extra", &self.extra)
93 .field("cancelled", &self.cancelled)
94 .field("result", &self.result)
95 .field("Carrier", &"<...>")
96 .finish()
97 }
98}
99
#[repr(transparent)]
/// A typed handle to a submitted operation with concrete opcode type `T`.
///
/// Zero-cost (`repr(transparent)`) wrapper over [`ErasedKey`] that remembers
/// `T` so `take_result` can recover the original opcode without a runtime
/// type check.
pub struct Key<T> {
    erased: ErasedKey,
    // Type marker only; the shared `RawOp` allocation owns the actual `T`.
    _p: std::marker::PhantomData<T>,
}
106
#[derive(Clone)]
#[repr(transparent)]
/// A type-erased, reference-counted handle to a submitted operation.
///
/// Cloning bumps the `ThinCell` refcount. The single-pointer layout
/// (`repr(transparent)` over a thin cell) is what lets a key round-trip
/// through a raw `usize` (`into_raw`/`from_raw`) as driver user data.
pub struct ErasedKey {
    inner: ThinCell<RawOp<dyn Carry>>,
}
117
#[derive(Clone)]
#[repr(transparent)]
/// A weak (non-owning) counterpart to [`ErasedKey`]; does not keep the op
/// alive. See [`WeakKey::upgrade`].
pub(crate) struct WeakKey {
    inner: Weak<RawOp<dyn Carry>>,
}
124
125impl<T> Clone for Key<T> {
126 fn clone(&self) -> Self {
127 Self {
128 erased: self.erased.clone(),
129 _p: std::marker::PhantomData,
130 }
131 }
132}
133
134impl<T> Debug for Key<T> {
135 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
136 write!(f, "Key({})", self.erased.inner.as_ptr() as usize)
137 }
138}
139
140impl<T> Key<T> {
141 pub(crate) fn into_raw(self) -> usize {
142 self.erased.into_raw()
143 }
144
145 pub(crate) fn erase(self) -> ErasedKey {
146 self.erased
147 }
148}
149
impl<T: OpCode> Key<T> {
    /// Consumes the key and returns the completion result together with the
    /// original opcode/buffer.
    ///
    /// Panics if the key is not the sole strong handle or the result is not
    /// ready yet (see `ErasedKey::take_result`).
    pub(crate) fn take_result(self) -> BufResult<usize, T> {
        // SAFETY: this key was created through `Key::<T>::new`, so the erased
        // allocation really holds a `RawOp<Carrier<T>>`.
        unsafe { self.erased.take_result::<T>() }
    }
}
162
163impl<T: OpCode + 'static> Key<T> {
164 pub(crate) fn new(op: T, extra: impl Into<Extra>, driver_ty: DriverType) -> Self {
166 let erased = ErasedKey::new(op, extra.into(), driver_ty);
167
168 Self {
169 erased,
170 _p: std::marker::PhantomData,
171 }
172 }
173
174 pub(crate) fn set_extra(&self, extra: impl Into<Extra>) {
175 self.borrow().extra = extra.into();
176 }
177}
178
179impl<T> Deref for Key<T> {
180 type Target = ErasedKey;
181
182 fn deref(&self) -> &Self::Target {
183 &self.erased
184 }
185}
186
187impl<T> DerefMut for Key<T> {
188 fn deref_mut(&mut self) -> &mut Self::Target {
189 &mut self.erased
190 }
191}
192
193impl PartialEq for ErasedKey {
194 fn eq(&self, other: &Self) -> bool {
195 self.inner.ptr_eq(&other.inner)
196 }
197}
198
199impl Eq for ErasedKey {}
200
201impl Hash for ErasedKey {
202 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
203 (self.inner.as_ptr() as usize).hash(state)
204 }
205}
206
207impl Unpin for ErasedKey {}
208
impl ErasedKey {
    /// Allocates the shared `RawOp` state for `op` and returns the erased,
    /// owning handle.
    pub(crate) fn new<T: OpCode + 'static>(op: T, extra: Extra, driver_ty: DriverType) -> Self {
        let raw_op = RawOp {
            extra,
            cancelled: false,
            // No result yet and no waker registered.
            result: PushEntry::Pending(None),
            carrier: Carrier::new(op, driver_ty),
        };
        let mut inner = ThinCell::new(raw_op);
        // SAFETY: `inner` was just created, so no other borrow can exist.
        unsafe { inner.borrow_unchecked().carrier.init() };
        Self {
            // SAFETY: unsizes `RawOp<Carrier<T>>` to `RawOp<dyn Carry>`; the
            // closure only coerces the pointer, it does not move the value.
            inner: unsafe { inner.unsize(|p| p as *const Inner<RawOp<dyn Carry>>) },
        }
    }

    /// Reconstructs an owning key from the raw `usize` produced by
    /// `into_raw`.
    ///
    /// # Safety
    /// `user_data` must come from `into_raw` (or `Key::into_raw`), and each
    /// leaked value must be reclaimed at most once, or the refcount is
    /// corrupted.
    pub(crate) unsafe fn from_raw(user_data: usize) -> Self {
        let inner = unsafe { ThinCell::from_raw(user_data as *mut ()) };
        Self { inner }
    }

    /// Reconstructs an owning key from a pointer to the op's `Overlapped`
    /// block.
    ///
    /// # Safety
    /// `optr` must be the exact pointer previously produced by `into_optr`.
    #[cfg(windows)]
    pub(crate) unsafe fn from_optr(optr: *mut crate::sys::Overlapped) -> Self {
        // Mirror of `into_optr`: step back over the two leading usize-sized
        // words to recover the allocation start — presumably the cell's
        // header/refcount words; TODO confirm against `ThinCell`'s layout.
        let ptr = unsafe { optr.cast::<usize>().offset(-2).cast() };
        let inner = unsafe { ThinCell::from_raw(ptr) };
        Self { inner }
    }

    /// Leaks the key and returns a pointer to the op's `Overlapped` block
    /// (two usize-sized words past the allocation start); reclaim with
    /// `from_optr`.
    #[cfg(windows)]
    pub(crate) fn into_optr(self) -> *mut crate::sys::Overlapped {
        unsafe { self.inner.leak().cast::<usize>().add(2).cast() }
    }

    /// Creates a weak handle that does not keep the op alive.
    pub(crate) fn downgrade(&self) -> WeakKey {
        WeakKey {
            inner: self.inner.downgrade(),
        }
    }

    /// The allocation address, without giving up ownership.
    pub(crate) fn as_raw(&self) -> usize {
        self.inner.as_ptr() as _
    }

    /// Leaks this handle's refcount and returns the allocation address; pair
    /// with `from_raw` to reclaim it.
    pub(crate) fn into_raw(self) -> usize {
        self.inner.leak() as _
    }

    /// Dynamically-checked borrow of the shared op state.
    #[inline]
    pub(crate) fn borrow(&self) -> Ref<'_, RawOp<dyn Carry>> {
        self.inner.borrow()
    }

    /// Marks the op cancelled; returns whether it was already cancelled.
    pub(crate) fn set_cancelled(&self) -> bool {
        let mut op = self.borrow();
        mem::replace(&mut op.cancelled, true)
    }

    /// Whether the driver has already recorded a completion result.
    pub(crate) fn has_result(&self) -> bool {
        self.borrow().result.is_ready()
    }

    /// Whether this is the only strong handle to the op.
    pub(crate) fn is_unique(&self) -> bool {
        ThinCell::count(&self.inner) == 1
    }

    /// Records the completion result and wakes the waiting task, if any.
    pub(crate) fn set_result(&self, res: io::Result<usize>) {
        let mut this = self.borrow();
        {
            // Split the borrow so the carrier and `extra` can be used
            // simultaneously.
            let RawOp { extra, carrier, .. } = &mut *this;
            // SAFETY: presumably the driver-specific `set_result` contract
            // (an `extra` valid for this op) holds here — TODO confirm in
            // `crate::sys::Carry`.
            unsafe { crate::sys::Carry::set_result(carrier, &res, extra) };
        }
        // Store the result; if a waker was parked, fire it now.
        if let PushEntry::Pending(Some(w)) =
            std::mem::replace(&mut this.result, PushEntry::Ready(res))
        {
            w.wake();
        }
    }

    /// Swaps in new extra data, returning the previous value.
    pub(crate) fn swap_extra(&self, extra: Extra) -> Extra {
        std::mem::replace(&mut self.borrow().extra, extra)
    }

    /// Registers `waker` to be notified on completion. Does nothing if the
    /// result is already stored, or if an equivalent waker is registered.
    pub(crate) fn set_waker(&self, waker: &Waker) {
        let PushEntry::Pending(w) = &mut self.borrow().result else {
            return;
        };

        // Skip the clone when the parked waker would wake the same task.
        if w.as_ref().is_some_and(|w| w.will_wake(waker)) {
            return;
        }

        *w = Some(waker.clone());
    }

    /// Consumes the (unique) key and extracts the result plus the original
    /// opcode/buffer.
    ///
    /// Panics if the key is not the sole strong handle ("Key not unique") or
    /// the result is not ready ("Result not ready").
    ///
    /// # Safety
    /// The erased allocation must actually hold a `RawOp<Carrier<T>>`, i.e.
    /// `T` must be the opcode type the key was created with.
    unsafe fn take_result<T: OpCode>(self) -> BufResult<usize, T> {
        // SAFETY: guaranteed by the caller (see `Key::take_result`).
        let this = unsafe { self.inner.downcast_unchecked::<RawOp<Carrier<T>>>() };
        let op = this.try_unwrap().map_err(|_| ()).expect("Key not unique");
        let res = op.result.take_ready().expect("Result not ready");
        BufResult(res, op.carrier.into_inner())
    }

    /// Wraps the key so it can be moved across threads without running its
    /// destructor there; see [`FrozenKey`].
    ///
    /// # Safety
    /// The caller must uphold `FrozenKey`'s constraints — presumably: do not
    /// access the non-thread-safe internals from a foreign thread except
    /// through `FrozenKey`'s API — TODO confirm at the call sites.
    pub(crate) unsafe fn freeze(self) -> FrozenKey {
        FrozenKey {
            inner: ManuallyDrop::new(self),
            // Records the freezing thread; `FrozenKey::drop` only releases
            // the refcount when dropped on that same thread.
            thread_id: SendWrapper::new(()),
        }
    }
}
373
374impl Debug for ErasedKey {
375 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
376 write!(f, "ErasedKey({})", self.inner.as_ptr() as usize)
377 }
378}
379
380impl WeakKey {
381 pub(crate) fn upgrade(&self) -> Option<ErasedKey> {
382 Some(ErasedKey {
383 inner: self.inner.upgrade()?,
384 })
385 }
386
387 pub(crate) fn as_ptr(&self) -> *const () {
388 self.inner.as_ptr()
389 }
390}
391
392impl PartialEq for WeakKey {
393 fn eq(&self, other: &Self) -> bool {
394 ptr::eq(self.inner.as_ptr(), other.inner.as_ptr())
395 }
396}
397
398impl Eq for WeakKey {}
399
400impl Hash for WeakKey {
401 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
402 (self.inner.as_ptr() as usize).hash(state)
403 }
404}
405
406impl Debug for WeakKey {
407 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
408 if let Some(upgraded) = self.inner.upgrade() {
409 Debug::fmt(&upgraded, f)
410 } else {
411 write!(f, "(Dropped)")
412 }
413 }
414}
415
#[repr(C)]
/// An [`ErasedKey`] made `Send`/`Sync` by promising never to release its
/// refcount on a foreign thread.
///
/// `inner` is wrapped in `ManuallyDrop` so `Drop` can decide — via the
/// `SendWrapper` thread check — whether running the destructor is safe.
pub(crate) struct FrozenKey {
    inner: ManuallyDrop<ErasedKey>,
    // Zero-sized marker recording the thread that froze the key.
    thread_id: SendWrapper<()>,
}
424
impl FrozenKey {
    /// Direct mutable access to the shared op state, bypassing the dynamic
    /// borrow check.
    pub fn as_mut(&mut self) -> &mut RawOp<dyn Carry> {
        // SAFETY: presumably freezing guarantees no concurrent borrow of the
        // cell while the key is frozen — TODO confirm at `freeze`'s call
        // sites.
        unsafe { self.inner.inner.borrow_unchecked() }
    }

    /// Thaws the key back into a normal [`ErasedKey`], skipping
    /// `FrozenKey`'s own destructor.
    pub fn into_inner(self) -> ErasedKey {
        let mut this = ManuallyDrop::new(self);
        // SAFETY: `self` is wrapped in `ManuallyDrop` just above, so `Drop`
        // never runs and `inner` is taken exactly once.
        unsafe { ManuallyDrop::take(&mut this.inner) }
    }
}
435
impl Drop for FrozenKey {
    fn drop(&mut self) {
        // Only release the refcount on the thread that froze the key; on any
        // other thread the key is intentionally leaked, since the `unsync`
        // refcount is not thread-safe.
        if self.thread_id.valid() {
            // SAFETY: `inner` is dropped at most once here — `into_inner` is
            // the only other taker, and it suppresses this destructor.
            unsafe { ManuallyDrop::drop(&mut self.inner) }
        }
    }
}
443
// SAFETY: presumably sound because `FrozenKey` only releases its refcount on
// the originating thread (checked via `SendWrapper` in `Drop`) — TODO confirm
// that no other cross-thread access to the `unsync` internals is possible.
unsafe impl Send for FrozenKey {}
unsafe impl Sync for FrozenKey {}
446
/// A non-owning view of an [`ErasedKey`] reconstructed from raw user data.
///
/// `ManuallyDrop` keeps the refcount untouched when the view is dropped;
/// call [`BorrowedKey::upgrade`] to take ownership instead.
pub(crate) struct BorrowedKey(ManuallyDrop<ErasedKey>);
453
impl BorrowedKey {
    /// Views the key behind `user_data` without consuming its refcount.
    ///
    /// # Safety
    /// `user_data` must be a value produced by `ErasedKey::into_raw` (or
    /// `Key::into_raw`) that has not yet been reclaimed.
    pub unsafe fn from_raw(user_data: usize) -> Self {
        let key = unsafe { ErasedKey::from_raw(user_data) };
        // `ManuallyDrop` makes this a borrow: dropping the `BorrowedKey`
        // leaves the leaked refcount in place.
        Self(ManuallyDrop::new(key))
    }

    /// Converts the borrow into an owning key, consuming the leaked refcount.
    pub fn upgrade(self) -> ErasedKey {
        ManuallyDrop::into_inner(self.0)
    }
}
464
465impl Deref for BorrowedKey {
466 type Target = ErasedKey;
467
468 fn deref(&self) -> &Self::Target {
469 &self.0
470 }
471}