pub(crate) mod sealed {
    //! Sealed-trait machinery: the traits below appear in public signatures
    //! but live in a crate-private module, so downstream crates cannot
    //! implement them for their own types.
    use super::{Fd, Fixed};
    use rustix::fd::RawFd;

    /// A submission target: either a raw file descriptor or an index into
    /// the ring's registered ("fixed") file table.
    #[derive(Debug)]
    pub enum Target {
        Fd(RawFd),
        Fixed(u32),
    }

    /// Types accepted where an operation requires a raw file descriptor.
    pub trait UseFd: Sized {
        /// Consumes `self` and yields the raw file descriptor.
        fn into(self) -> RawFd;
    }

    /// Types accepted where an operation takes either a raw fd or a
    /// fixed (registered) file index.
    pub trait UseFixed: Sized {
        /// Consumes `self` and yields the corresponding [`Target`].
        fn into(self) -> Target;
    }

    impl UseFd for Fd {
        #[inline]
        fn into(self) -> RawFd {
            self.0
        }
    }

    impl UseFixed for Fd {
        #[inline]
        fn into(self) -> Target {
            Target::Fd(self.0)
        }
    }

    impl UseFixed for Fixed {
        #[inline]
        fn into(self) -> Target {
            Target::Fixed(self.0)
        }
    }
}
42
43use crate::sys;
44use crate::util::{cast_ptr, unwrap_nonzero, unwrap_u32};
45use bitflags::bitflags;
46use core::convert::TryFrom;
47use core::marker::PhantomData;
48use core::num::NonZeroU32;
49use rustix::fd::RawFd;
50
51pub use sys::ReadWriteFlags as RwFlags;
52pub use sys::{
53 iovec, Advice, AtFlags, EpollEvent, Mode, MsgHdr, OFlags, RenameFlags, ResolveFlags,
54 SocketAddrLen, SocketAddrOpaque, Statx, StatxFlags,
55};
56
/// A raw file descriptor wrapper, usable anywhere an opcode accepts a
/// plain (non-registered) fd.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fd(pub RawFd);
61
/// An index into the ring's registered ("fixed") file table, usable
/// anywhere an opcode accepts a fixed-file target (see `sealed::UseFixed`).
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fixed(pub u32);
68
bitflags! {
    /// Flags for timeout operations; each constant re-exports the bit
    /// pattern of the matching `sys::IoringTimeoutFlags` value.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct TimeoutFlags: u32 {
        /// Interpret the timespec as an absolute time rather than relative.
        const ABS = sys::IoringTimeoutFlags::ABS.bits();

        /// Measure the timeout against the boot-time clock.
        const BOOTTIME = sys::IoringTimeoutFlags::BOOTTIME.bits();

        /// Measure the timeout against the real-time clock.
        const REALTIME = sys::IoringTimeoutFlags::REALTIME.bits();

        /// Update an existing linked timeout (maps to `UPDATE`).
        const LINK_TIMEOUT_UPDATE = sys::IoringTimeoutFlags::UPDATE.bits();

        /// Report timeout expiry as success instead of `-ETIME`.
        const ETIME_SUCCESS = sys::IoringTimeoutFlags::ETIME_SUCCESS.bits();
    }
}
94
bitflags! {
    /// Flags for fsync operations; mirrors `sys::IoringFsyncFlags`.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct FsyncFlags: u32 {
        /// Sync data only (like `fdatasync`), skipping non-essential metadata.
        const DATASYNC = sys::IoringFsyncFlags::DATASYNC.bits();
    }
}
102
bitflags! {
    /// Flags controlling how async-cancel requests match in-flight
    /// operations; mirrors `sys::IoringAsyncCancelFlags`. Composed by
    /// [`CancelBuilder`].
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub(crate) struct AsyncCancelFlags: u32 {
        /// Cancel every request matching the criteria, not just the first.
        const ALL = sys::IoringAsyncCancelFlags::ALL.bits();

        /// Match requests by file descriptor instead of `user_data`.
        const FD = sys::IoringAsyncCancelFlags::FD.bits();

        /// Match any in-flight request, ignoring `user_data`/fd criteria.
        const ANY = sys::IoringAsyncCancelFlags::ANY.bits();

        /// Interpret the fd value as a fixed (registered) file index.
        const FD_FIXED = sys::IoringAsyncCancelFlags::FD_FIXED.bits();
    }
}
134
/// Builder-style wrapper around the kernel `open_how` structure used by
/// `openat2`-style operations (flags, mode, resolve constraints).
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct OpenHow(sys::open_how);
140
141impl OpenHow {
142 pub const fn new() -> Self {
143 OpenHow(sys::open_how::zeroed())
144 }
145
146 pub const fn flags(mut self, flags: OFlags) -> Self {
147 self.0.flags = flags.bits() as _;
148 self
149 }
150
151 pub const fn mode(mut self, mode: Mode) -> Self {
152 self.0.mode = mode.bits() as _;
153 self
154 }
155
156 pub const fn resolve(mut self, resolve: ResolveFlags) -> Self {
157 self.0.resolve = resolve;
158 self
159 }
160}
161
/// Wrapper around the kernel timespec (seconds + nanoseconds) used for
/// timeout-style operations; constructed via [`Timespec::new`] or
/// converted from [`core::time::Duration`].
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Timespec(pub(crate) sys::Timespec);
165
166impl Default for Timespec {
167 fn default() -> Self {
168 Self(sys::Timespec {
169 tv_sec: 0,
170 tv_nsec: 0,
171 })
172 }
173}
174
175impl Timespec {
176 #[inline]
177 pub const fn new() -> Self {
178 Timespec(sys::Timespec {
179 tv_sec: 0,
180 tv_nsec: 0,
181 })
182 }
183
184 #[inline]
185 pub const fn sec(mut self, sec: u64) -> Self {
186 self.0.tv_sec = sec as _;
187 self
188 }
189
190 #[inline]
191 pub const fn nsec(mut self, nsec: u32) -> Self {
192 self.0.tv_nsec = nsec as _;
193 self
194 }
195}
196
197impl From<core::time::Duration> for Timespec {
198 fn from(value: core::time::Duration) -> Self {
199 Timespec::new()
200 .sec(value.as_secs())
201 .nsec(value.subsec_nanos())
202 }
203}
204
/// Arguments for an extended submit/wait call, wrapping
/// `sys::io_uring_getevents_arg`. The two lifetimes track the borrows of
/// the signal-mask and timespec pointers stored inside `args`
/// (`'prev: 'now` keeps the earlier borrow alive at least as long as the
/// most recent one).
#[derive(Default, Debug, Clone, Copy)]
pub struct SubmitArgs<'prev: 'now, 'now> {
    pub(crate) args: sys::io_uring_getevents_arg,
    // PhantomData markers carry the borrow lifetimes; no data is stored here.
    prev: PhantomData<&'prev ()>,
    now: PhantomData<&'now ()>,
}
230
impl<'prev, 'now> SubmitArgs<'prev, 'now> {
    /// Creates an empty argument set: null signal mask, null timespec,
    /// zero sizes. With no borrows stored, both lifetimes are `'static`.
    #[inline]
    pub const fn new() -> SubmitArgs<'static, 'static> {
        let args = sys::io_uring_getevents_arg {
            sigmask: sys::io_uring_ptr::null(),
            sigmask_sz: 0,
            min_wait_usec: 0,
            ts: sys::io_uring_ptr::null(),
        };

        SubmitArgs {
            args,
            prev: PhantomData,
            now: PhantomData,
        }
    }

    /// Stores a pointer to `sigmask` (and its size) in the argument block.
    ///
    /// The returned type shifts the lifetimes (`'now` becomes the new
    /// `'prev`) so that every referenced object provably outlives the
    /// argument struct handed to the kernel.
    #[inline]
    pub fn sigmask<'new>(mut self, sigmask: &'new sys::KernelSigSet) -> SubmitArgs<'now, 'new> {
        self.args.sigmask = sys::io_uring_ptr::new(cast_ptr(sigmask) as _);
        self.args.sigmask_sz = core::mem::size_of::<sys::KernelSigSet>() as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }

    /// Stores a pointer to `timespec` in the argument block; lifetimes
    /// shift exactly as in [`SubmitArgs::sigmask`].
    #[inline]
    pub fn timespec<'new>(mut self, timespec: &'new Timespec) -> SubmitArgs<'now, 'new> {
        self.args.ts = sys::io_uring_ptr::new(cast_ptr(timespec) as _);

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }
}
271
/// One entry of a buffer ring, wrapping `sys::io_uring_buf`
/// (address, length, buffer id). `#[repr(transparent)]` keeps the layout
/// identical to the kernel structure so rings can be cast entry-by-entry.
#[repr(transparent)]
pub struct BufRingEntry(sys::io_uring_buf);
274
// `len` here is the buffer length field, not a collection length, so an
// `is_empty` companion would be meaningless.
#[allow(clippy::len_without_is_empty)]
impl BufRingEntry {
    /// Sets the buffer address.
    pub fn set_addr(&mut self, addr: *mut core::ffi::c_void) {
        self.0.addr.ptr = addr;
    }

    /// Returns the buffer address.
    pub fn addr(&self) -> *mut core::ffi::c_void {
        self.0.addr.ptr
    }

    /// Sets the buffer length.
    pub fn set_len(&mut self, len: u32) {
        self.0.len = len;
    }

    /// Returns the buffer length.
    pub fn len(&self) -> u32 {
        self.0.len
    }

    /// Sets the buffer id.
    pub fn set_bid(&mut self, bid: u16) {
        self.0.bid = bid;
    }

    /// Returns the buffer id.
    pub fn bid(&self) -> u16 {
        self.0.bid
    }

    /// Returns a pointer to the ring's tail field, which the kernel layout
    /// overlays on the first entry (hence the cast to
    /// `sys::io_uring_buf_ring` and the union access).
    ///
    /// # Safety
    ///
    /// `ring_base` must point to the start of a valid, properly aligned
    /// buffer ring (the first `BufRingEntry`), and the memory must remain
    /// valid for the lifetime of the returned pointer.
    pub unsafe fn tail(ring_base: *const BufRingEntry) -> *const u16 {
        // addr_of! avoids creating an intermediate reference to the
        // union field.
        core::ptr::addr_of!(
            (*ring_base.cast::<sys::io_uring_buf_ring>())
                .tail_or_bufs
                .tail
                .as_ref()
                .tail
        )
    }
}
330
/// A destination slot in the ring's registered file table, stored in the
/// kernel's 1-based encoding (`NonZeroU32`, so `Option<DestinationSlot>`
/// is free). The maximum value is the auto-allocation sentinel.
#[derive(Debug, Clone, Copy)]
pub struct DestinationSlot {
    // 1-based kernel encoding; see `kernel_index_arg`.
    dest: NonZeroU32,
}
338
339impl DestinationSlot {
340 const AUTO_ALLOC: NonZeroU32 =
342 unwrap_nonzero(NonZeroU32::new(sys::IORING_FILE_INDEX_ALLOC as u32));
343
344 pub const fn auto_target() -> Self {
346 Self {
347 dest: DestinationSlot::AUTO_ALLOC,
348 }
349 }
350
351 pub fn try_from_slot_target(target: u32) -> Result<Self, u32> {
355 const MAX_INDEX: u32 = unwrap_u32(DestinationSlot::AUTO_ALLOC.get().checked_sub(2));
357
358 if target > MAX_INDEX {
359 return Err(target);
360 }
361
362 let kernel_index = target.saturating_add(1);
363 debug_assert!(0 < kernel_index && kernel_index < DestinationSlot::AUTO_ALLOC.get());
365 let dest = NonZeroU32::new(kernel_index).unwrap();
366
367 Ok(Self { dest })
368 }
369
370 pub(crate) fn kernel_index_arg(&self) -> u32 {
371 self.dest.get()
372 }
373}
374
/// Parsed view of the buffer produced by a multishot `recvmsg`: a copied
/// header (`sys::io_uring_recvmsg_out`) followed by three borrowed,
/// disjoint slices of the source buffer.
#[derive(Debug)]
pub struct RecvMsgOut<'buf> {
    header: sys::io_uring_recvmsg_out,
    // Capacity that was reserved for the name field (from the msghdr),
    // kept so truncation can be detected against `header.namelen`.
    msghdr_name_len: usize,

    name_data: &'buf [u8],
    control_data: &'buf [u8],
    payload_data: &'buf [u8],
}
390
impl<'buf> RecvMsgOut<'buf> {
    /// Offset where variable-length data begins: the fixed-size header
    /// occupies the front of the buffer.
    const DATA_START: usize = core::mem::size_of::<sys::io_uring_recvmsg_out>();

    /// Parses `buffer` using the capacities recorded in `msghdr`
    /// (`msg_namelen` / `msg_controllen` describe how much space was
    /// reserved for the name and control regions).
    ///
    /// Returns `Err(())` if `buffer` is too small to hold the header plus
    /// both reserved regions. Each returned slice is capped at both the
    /// kernel-reported length and the reserved capacity, so truncated data
    /// never reads past its region.
    #[allow(clippy::result_unit_err)]
    #[allow(clippy::useless_conversion)]
    pub fn parse(buffer: &'buf [u8], msghdr: &MsgHdr) -> Result<Self, ()> {
        let msghdr_name_len = usize::try_from(msghdr.msg_namelen).unwrap();
        let msghdr_control_len = usize::try_from(msghdr.msg_controllen).unwrap();

        // Reject buffers that cannot hold header + name + control regions;
        // checked_add guards the size computation itself against overflow
        // (overflow also yields Err via unwrap_or(true)).
        if Self::DATA_START
            .checked_add(msghdr_name_len)
            .and_then(|acc| acc.checked_add(msghdr_control_len))
            .map(|header_len| buffer.len() < header_len)
            .unwrap_or(true)
        {
            return Err(());
        }
        // Size was validated above; read_unaligned because the byte buffer
        // carries no alignment guarantee for the header type.
        let header = unsafe {
            buffer
                .as_ptr()
                .cast::<sys::io_uring_recvmsg_out>()
                .read_unaligned()
        };

        // Name region: data length is the smaller of what the kernel wrote
        // and the reserved capacity; the next region starts after the full
        // reserved capacity regardless of how much was written.
        let (name_data, control_start) = {
            let name_start = Self::DATA_START;
            let name_data_end =
                name_start + usize::min(usize::try_from(header.namelen).unwrap(), msghdr_name_len);
            let name_field_end = name_start + msghdr_name_len;
            (&buffer[name_start..name_data_end], name_field_end)
        };
        // Control region: same capped-length / fixed-capacity scheme.
        let (control_data, payload_start) = {
            let control_data_end = control_start
                + usize::min(
                    usize::try_from(header.controllen).unwrap(),
                    msghdr_control_len,
                );
            let control_field_end = control_start + msghdr_control_len;
            (&buffer[control_start..control_data_end], control_field_end)
        };
        // Payload takes the remainder, capped at the kernel-reported length.
        let payload_data = {
            let payload_data_end = payload_start
                + usize::min(
                    usize::try_from(header.payloadlen).unwrap(),
                    buffer.len() - payload_start,
                );
            &buffer[payload_start..payload_data_end]
        };

        Ok(Self {
            header,
            msghdr_name_len,
            name_data,
            control_data,
            payload_data,
        })
    }

    /// Kernel-reported length of the incoming name; may exceed the
    /// reserved capacity, in which case the data was truncated.
    pub fn incoming_name_len(&self) -> u32 {
        self.header.namelen
    }

    /// Whether the name data was truncated (reported length exceeds the
    /// capacity that was reserved in the msghdr).
    pub fn is_name_data_truncated(&self) -> bool {
        self.header.namelen as usize > self.msghdr_name_len
    }

    /// The (possibly truncated) name bytes.
    pub fn name_data(&self) -> &[u8] {
        self.name_data
    }

    /// Kernel-reported length of the incoming control data.
    pub fn incoming_control_len(&self) -> u32 {
        self.header.controllen
    }

    /// Whether control data was truncated, per the kernel's CTRUNC flag.
    pub fn is_control_data_truncated(&self) -> bool {
        self.header.flags.contains(sys::RecvmsgOutFlags::CTRUNC)
    }

    /// The (possibly truncated) control bytes.
    pub fn control_data(&self) -> &[u8] {
        self.control_data
    }

    /// Whether the payload was truncated, per the kernel's TRUNC flag.
    pub fn is_payload_truncated(&self) -> bool {
        self.header.flags.contains(sys::RecvmsgOutFlags::TRUNC)
    }

    /// The (possibly truncated) payload bytes.
    pub fn payload_data(&self) -> &[u8] {
        self.payload_data
    }

    /// Kernel-reported length of the incoming payload.
    pub fn incoming_payload_len(&self) -> u32 {
        self.header.payloadlen
    }

    /// Raw flags the kernel set on this message.
    pub fn flags(&self) -> sys::RecvmsgOutFlags {
        self.header.flags
    }
}
531
/// Builder describing the match criteria for an async-cancel request:
/// flags selecting the matching mode, plus an optional `user_data` value
/// or fd/fixed-index target.
#[derive(Debug)]
pub struct CancelBuilder {
    pub(crate) flags: AsyncCancelFlags,
    pub(crate) user_data: sys::io_uring_user_data,
    pub(crate) fd: Option<sealed::Target>,
}
563
564impl CancelBuilder {
565 pub const fn any() -> Self {
571 Self {
572 flags: AsyncCancelFlags::ANY,
573 user_data: sys::io_uring_user_data::zeroed(),
574 fd: None,
575 }
576 }
577
578 pub fn user_data(user_data: impl Into<sys::io_uring_user_data>) -> Self {
585 Self {
586 flags: AsyncCancelFlags::empty(),
587 user_data: user_data.into(),
588 fd: None,
589 }
590 }
591
592 pub const fn user_data_u64(u64_: u64) -> Self {
594 Self {
595 flags: AsyncCancelFlags::empty(),
596 user_data: sys::io_uring_user_data::from_u64(u64_),
597 fd: None,
598 }
599 }
600
601 pub const fn user_data_ptr(ptr: *mut core::ffi::c_void) -> Self {
603 Self {
604 flags: AsyncCancelFlags::empty(),
605 user_data: sys::io_uring_user_data::from_ptr(ptr),
606 fd: None,
607 }
608 }
609
610 pub fn fd(fd: impl sealed::UseFixed) -> Self {
618 let mut flags = AsyncCancelFlags::FD;
619 let target = fd.into();
620 if matches!(target, sealed::Target::Fixed(_)) {
621 flags.insert(AsyncCancelFlags::FD_FIXED);
622 }
623 Self {
624 flags,
625 user_data: sys::io_uring_user_data::default(),
626 fd: Some(target),
627 }
628 }
629
630 pub fn all(mut self) -> Self {
637 self.flags.insert(AsyncCancelFlags::ALL);
638 self
639 }
640
641 pub(crate) fn to_fd(&self) -> i32 {
642 self.fd
643 .as_ref()
644 .map(|target| match *target {
645 sealed::Target::Fd(fd) => fd,
646 sealed::Target::Fixed(idx) => idx as i32,
647 })
648 .unwrap_or(-1)
649 }
650}
651
/// Builder-style wrapper around one `sys::FutexWait` entry
/// (value, address, flags) for vectored futex-wait operations.
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct FutexWaitV(sys::FutexWait);
657
658impl FutexWaitV {
659 pub const fn new() -> Self {
660 Self(sys::FutexWait::new())
661 }
662
663 pub const fn val(mut self, val: u64) -> Self {
664 self.0.val = val;
665 self
666 }
667
668 pub const fn uaddr(mut self, uaddr: *mut core::ffi::c_void) -> Self {
669 self.0.uaddr = sys::FutexWaitPtr::new(uaddr);
670 self
671 }
672
673 pub const fn flags(mut self, flags: sys::FutexWaitFlags) -> Self {
674 self.0.flags = flags;
675 self
676 }
677}
678
#[cfg(test)]
mod tests {
    use core::time::Duration;

    use crate::types::sealed::Target;

    use super::*;

    // Duration -> Timespec must preserve whole seconds and sub-second nanos.
    #[test]
    fn timespec_from_duration_converts_correctly() {
        let duration = Duration::new(2, 500);
        let timespec = Timespec::from(duration);

        assert_eq!(timespec.0.tv_sec as u64, duration.as_secs());
        assert_eq!(timespec.0.tv_nsec as u32, duration.subsec_nanos());
    }

    // Each CancelBuilder constructor must produce the expected flag set,
    // and `all()` must OR in ALL without clobbering existing flags.
    #[test]
    fn test_cancel_builder_flags() {
        let cb = CancelBuilder::any();
        assert_eq!(cb.flags, AsyncCancelFlags::ANY);

        // user_data: no flags initially, ALL after .all().
        let mut cb = CancelBuilder::user_data(42);
        assert_eq!(cb.flags, AsyncCancelFlags::empty());
        assert_eq!(cb.user_data, sys::io_uring_user_data::from_u64(42));
        assert!(cb.fd.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::ALL);

        // Raw fd target: FD flag only.
        let mut cb = CancelBuilder::fd(Fd(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD);
        assert!(matches!(cb.fd, Some(Target::Fd(42))));
        assert_eq!(cb.user_data, Default::default());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::ALL);

        // Fixed-table target: FD plus FD_FIXED.
        let mut cb = CancelBuilder::fd(Fixed(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED);
        assert!(matches!(cb.fd, Some(Target::Fixed(42))));
        assert_eq!(cb.user_data, Default::default());
        cb = cb.all();
        assert_eq!(
            cb.flags,
            AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED | AsyncCancelFlags::ALL
        );
    }
}