#![allow(dead_code)]

use std::{
    fmt::{self, Debug},
    hash::Hash,
    io,
    mem::ManuallyDrop,
    ops::{Deref, DerefMut},
    pin::Pin,
    task::Waker,
};

use compio_buf::BufResult;
use thin_cell::{Ref, ThinCell};

use crate::{Extra, OpCode, PushEntry};

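/// The state stored behind every key: the driver-specific `Extra` data, a
/// cancellation flag, the waker/result slot, and finally the operation itself.
/// `op` is the last field so the struct can be unsized to `RawOp<dyn OpCode>`.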
#[repr(C)]
pub(crate) struct RawOp<T: ?Sized> {
    /// Driver-specific data attached to the operation.
    extra: Extra,
    /// Whether cancellation has been requested.
    cancelled: bool,
    /// The waker registered while the operation is in flight, or the
    /// completed result once it is ready.
    result: PushEntry<Option<Waker>, io::Result<usize>>,
    /// The operation itself; kept last so that `T` may be unsized.
    pub(crate) op: T,
}

impl<T: ?Sized> RawOp<T> {
    pub fn extra(&self) -> &Extra {
        &self.extra
    }

    pub fn extra_mut(&mut self) -> &mut Extra {
        &mut self.extra
    }

    fn pinned_op(&mut self) -> Pin<&mut T> {
        unsafe { Pin::new_unchecked(&mut self.op) }
    }
}

#[cfg(windows)]
impl<T: OpCode + ?Sized> RawOp<T> {
    /// Run the operation synchronously; only valid for operations that
    /// complete immediately instead of reporting an overlapped completion.
    pub fn operate_blocking(&mut self) -> io::Result<usize> {
        use std::task::Poll;

        let optr = self.extra_mut().optr();
        let op = self.pinned_op();
        let res = unsafe { op.operate(optr.cast()) };
        match res {
            Poll::Pending => unreachable!("this operation is not overlapped"),
            Poll::Ready(res) => res,
        }
    }
}

impl<T: ?Sized> Debug for RawOp<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawOp")
            .field("extra", &self.extra)
            .field("cancelled", &self.cancelled)
            .field("result", &self.result)
            .field("op", &"<...>")
            .finish()
    }
}

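/// A typed handle to a submitted operation.
///
/// It is a transparent wrapper around an `ErasedKey` that remembers the
/// concrete operation type, so `Key::take_result` can hand the buffer back as
/// a `T` again.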
#[repr(transparent)]
pub struct Key<T> {
    erased: ErasedKey,
    _p: std::marker::PhantomData<T>,
}

impl<T> Unpin for Key<T> {}

impl<T> Clone for Key<T> {
    fn clone(&self) -> Self {
        Self {
            erased: self.erased.clone(),
            _p: std::marker::PhantomData,
        }
    }
}

impl<T> Debug for Key<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Key({})", self.erased.inner.as_ptr() as usize)
    }
}

impl<T> Key<T> {
    pub(crate) fn into_raw(self) -> usize {
        self.erased.into_raw()
    }

    pub(crate) fn erase(self) -> ErasedKey {
        self.erased
    }

    pub(crate) fn take_result(self) -> BufResult<usize, T> {
        // SAFETY: a `Key<T>` is only ever constructed around a `T`, so the
        // erased operation is a `RawOp<T>`.
        unsafe { self.erased.take_result::<T>() }
    }
}

impl<T: OpCode + 'static> Key<T> {
    pub(crate) fn new(op: T, extra: impl Into<Extra>) -> Self {
        let erased = ErasedKey::new(op, extra.into());

        Self {
            erased,
            _p: std::marker::PhantomData,
        }
    }

    pub(crate) fn set_extra(&self, extra: impl Into<Extra>) {
        self.borrow().extra = extra.into();
    }
}

impl<T> Deref for Key<T> {
    type Target = ErasedKey;

    fn deref(&self) -> &Self::Target {
        &self.erased
    }
}

impl<T> DerefMut for Key<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.erased
    }
}
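
/// A type-erased handle to a `RawOp<dyn OpCode>` stored in a `ThinCell`.
///
/// Keys compare and hash by pointer identity and can round-trip through a raw
/// `usize` (`into_raw`/`from_raw`) so the driver can carry them as user data.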
#[derive(Clone)]
#[repr(transparent)]
pub struct ErasedKey {
    inner: ThinCell<RawOp<dyn OpCode>>,
}

impl PartialEq for ErasedKey {
    fn eq(&self, other: &Self) -> bool {
        self.inner.ptr_eq(&other.inner)
    }
}

impl Eq for ErasedKey {}

impl Hash for ErasedKey {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        (self.inner.as_ptr() as usize).hash(state)
    }
}

impl Unpin for ErasedKey {}

impl ErasedKey {
    pub(crate) fn new<T: OpCode + 'static>(op: T, extra: Extra) -> Self {
        let raw_op = RawOp {
            extra,
            cancelled: false,
            result: PushEntry::Pending(None),
            op,
        };
        // Erase the concrete operation type behind `dyn OpCode` while keeping
        // a thin pointer.
        let inner = unsafe { ThinCell::new_unsize(raw_op, |p| p as _) };
        Self { inner }
    }

    /// # Safety
    ///
    /// `user_data` must be a value previously produced by `into_raw` for a key
    /// that is still alive.
    pub(crate) unsafe fn from_raw(user_data: usize) -> Self {
        let inner = unsafe { ThinCell::from_raw(user_data as *mut ()) };
        Self { inner }
    }

    /// # Safety
    ///
    /// `optr` must be a pointer previously produced by `into_optr`.
    #[cfg(windows)]
    pub(crate) unsafe fn from_optr(optr: *mut crate::sys::Overlapped) -> Self {
        // Undo the fixed offset applied in `into_optr`.
        let ptr = unsafe { optr.cast::<usize>().offset(-2).cast() };
        let inner = unsafe { ThinCell::from_raw(ptr) };
        Self { inner }
    }

    #[cfg(windows)]
    pub(crate) fn into_optr(self) -> *mut crate::sys::Overlapped {
        unsafe { self.inner.leak().cast::<usize>().add(2).cast() }
    }

    pub(crate) fn as_raw(&self) -> usize {
        self.inner.as_ptr() as _
    }

    pub(crate) fn into_raw(self) -> usize {
        self.inner.leak() as _
    }

    #[inline]
    pub(crate) fn borrow(&self) -> Ref<'_, RawOp<dyn OpCode>> {
        self.inner.borrow()
    }

    pub(crate) fn set_cancelled(&self) {
        self.borrow().cancelled = true;
    }

    pub(crate) fn has_result(&self) -> bool {
        self.borrow().result.is_ready()
    }

    pub(crate) fn set_result(&self, res: io::Result<usize>) {
        let mut this = self.borrow();
        #[cfg(io_uring)]
        if let Ok(res) = res
            && this.extra.is_iour()
        {
            unsafe {
                Pin::new_unchecked(&mut this.op).set_result(res);
            }
        }
        // Publish the result and wake the task that registered a waker, if any.
        if let PushEntry::Pending(Some(w)) =
            std::mem::replace(&mut this.result, PushEntry::Ready(res))
        {
            w.wake();
        }
    }

    pub(crate) fn swap_extra(&self, extra: Extra) -> Extra {
        std::mem::replace(&mut self.borrow().extra, extra)
    }

    pub(crate) fn set_waker(&self, waker: &Waker) {
        let PushEntry::Pending(w) = &mut self.borrow().result else {
            return;
        };

        if w.as_ref().is_some_and(|w| w.will_wake(waker)) {
            return;
        }

        *w = Some(waker.clone());
    }

    /// # Safety
    ///
    /// The erased operation must actually be a `RawOp<T>`.
    ///
    /// Panics if this is not the only remaining handle or the result has not
    /// been set yet.
    unsafe fn take_result<T>(self) -> BufResult<usize, T> {
        let this = unsafe { self.inner.downcast_unchecked::<RawOp<T>>() };
        let op = this.try_unwrap().map_err(|_| ()).expect("Key not unique");
        let res = op.result.take_ready().expect("Result not ready");
        BufResult(res, op.op)
    }

    /// # Safety
    ///
    /// `FrozenKey` bypasses the cell's borrow checking and is asserted
    /// `Send`/`Sync`, so the caller must ensure the operation is not accessed
    /// elsewhere while it is frozen.
    pub(crate) unsafe fn freeze(self) -> FrozenKey {
        FrozenKey {
            inner: ManuallyDrop::new(self),
        }
    }
}

impl Debug for ErasedKey {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "ErasedKey({})", self.inner.as_ptr() as usize)
    }
}

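/// An `ErasedKey` wrapped in `ManuallyDrop` so that dropping the `FrozenKey`
/// never releases the underlying operation; it hands out unchecked access to
/// the `RawOp` and is asserted to be `Send` and `Sync`.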
#[repr(transparent)]
pub(crate) struct FrozenKey {
    inner: ManuallyDrop<ErasedKey>,
}

impl FrozenKey {
    pub fn as_mut(&mut self) -> &mut RawOp<dyn OpCode> {
        unsafe { self.inner.inner.borrow_unchecked() }
    }

    pub fn pinned_op(&mut self) -> Pin<&mut dyn OpCode> {
        self.as_mut().pinned_op()
    }

    pub fn into_inner(self) -> ErasedKey {
        ManuallyDrop::into_inner(self.inner)
    }
}

unsafe impl Send for FrozenKey {}
unsafe impl Sync for FrozenKey {}

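/// A non-owning view of a key reconstructed from its raw `usize` form.
///
/// Dropping it does not release the key; `upgrade` converts it back into an
/// owning `ErasedKey`.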
pub(crate) struct BorrowedKey(ManuallyDrop<ErasedKey>);

impl BorrowedKey {
    pub unsafe fn from_raw(user_data: usize) -> Self {
        let key = unsafe { ErasedKey::from_raw(user_data) };
        Self(ManuallyDrop::new(key))
    }

    pub fn upgrade(self) -> ErasedKey {
        ManuallyDrop::into_inner(self.0)
    }
}

impl Deref for BorrowedKey {
    type Target = ErasedKey;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

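/// Extension trait for getting the pinned `dyn OpCode` out of a borrowed
/// `RawOp` cell.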
pub trait RefExt {
    fn pinned_op(&mut self) -> Pin<&mut dyn OpCode>;
}

impl RefExt for Ref<'_, RawOp<dyn OpCode>> {
    fn pinned_op(&mut self) -> Pin<&mut dyn OpCode> {
        self.deref_mut().pinned_op()
    }
}