// Sealed conversion traits: opcodes that accept either a raw fd or a
// registered (fixed) file slot take `impl UseFixed`, while fd-only opcodes
// take `impl UseFd`. Keeping the traits in a private module prevents
// downstream implementations.
pub(crate) mod sealed {
    use super::{Fd, Fixed};
    use rustix::fd::RawFd;

    #[derive(Debug)]
    pub enum Target {
        Fd(RawFd),
        Fixed(u32),
    }

    pub trait UseFd: Sized {
        fn into(self) -> RawFd;
    }

    pub trait UseFixed: Sized {
        fn into(self) -> Target;
    }

    impl UseFd for Fd {
        #[inline]
        fn into(self) -> RawFd {
            self.0
        }
    }

    impl UseFixed for Fd {
        #[inline]
        fn into(self) -> Target {
            Target::Fd(self.0)
        }
    }

    impl UseFixed for Fixed {
        #[inline]
        fn into(self) -> Target {
            Target::Fixed(self.0)
        }
    }
}

use crate::sys;
use crate::util::{cast_ptr, unwrap_nonzero, unwrap_u32};
use bitflags::bitflags;
use core::convert::TryFrom;
use core::marker::PhantomData;
use core::num::NonZeroU32;
use rustix::fd::RawFd;

pub use sys::ReadWriteFlags as RwFlags;
pub use sys::{
    iovec, Advice, AtFlags, EpollEvent, Mode, MsgHdr, OFlags, RenameFlags, ResolveFlags,
    SocketAddrLen, SocketAddrOpaque, Statx, StatxFlags,
};

/// A file descriptor that has not been registered with io_uring.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fd(pub RawFd);

/// An index into the ring's fixed-file table, populated via file
/// registration. Fixed descriptors can reduce per-operation overhead
/// compared to [`Fd`].
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fixed(pub u32);

bitflags! {
    /// Options for timeout operations.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct TimeoutFlags: u32 {
        /// Interpret the timespec as an absolute time rather than a
        /// relative one.
        const ABS = sys::IoringTimeoutFlags::ABS.bits();

        /// Measure the timeout against `CLOCK_BOOTTIME`.
        const BOOTTIME = sys::IoringTimeoutFlags::BOOTTIME.bits();

        /// Measure the timeout against `CLOCK_REALTIME`.
        const REALTIME = sys::IoringTimeoutFlags::REALTIME.bits();

        /// Update an existing timeout rather than arming a new one.
        const LINK_TIMEOUT_UPDATE = sys::IoringTimeoutFlags::UPDATE.bits();

        /// Complete an expired timeout with a success result (0) instead
        /// of `-ETIME`.
        const ETIME_SUCCESS = sys::IoringTimeoutFlags::ETIME_SUCCESS.bits();
    }
}

bitflags! {
    /// Options for fsync operations.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct FsyncFlags: u32 {
        /// Sync only the file data and the metadata needed to retrieve
        /// it, like `fdatasync(2)`.
        const DATASYNC = sys::IoringFsyncFlags::DATASYNC.bits();
    }
}

bitflags! {
    /// Options for async cancel operations.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub(crate) struct AsyncCancelFlags: u32 {
        /// Cancel all requests that match the given criteria, rather
        /// than only the first one.
        const ALL = sys::IoringAsyncCancelFlags::ALL.bits();

        /// Match requests by file descriptor instead of by user_data.
        const FD = sys::IoringAsyncCancelFlags::FD.bits();

        /// Match any request in the ring, regardless of user_data or fd.
        const ANY = sys::IoringAsyncCancelFlags::ANY.bits();

        /// The fd criterion refers to a fixed (registered) file rather
        /// than a raw file descriptor.
        const FD_FIXED = sys::IoringAsyncCancelFlags::FD_FIXED.bits();
    }
}

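/// Wrapper around `open_how`, the argument block of `openat2(2)`.
///
/// A minimal usage sketch; the flag choices are illustrative only, and the
/// doctest is `ignore`d because the crate path is not assumed here:
///
/// ```ignore
/// let how = OpenHow::new()
///     .flags(OFlags::RDONLY)
///     .mode(Mode::empty())
///     .resolve(ResolveFlags::BENEATH);
/// ```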
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct OpenHow(sys::open_how);

impl OpenHow {
    pub const fn new() -> Self {
        OpenHow(sys::open_how::zeroed())
    }

    pub const fn flags(mut self, flags: OFlags) -> Self {
        self.0.flags = flags.bits() as _;
        self
    }

    pub const fn mode(mut self, mode: Mode) -> Self {
        self.0.mode = mode.bits() as _;
        self
    }

    pub const fn resolve(mut self, resolve: ResolveFlags) -> Self {
        self.0.resolve = resolve;
        self
    }
}

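/// A kernel-facing timespec, used by timeout and related operations.
///
/// A minimal sketch of the const builder methods (`ignore`d doctest; the
/// crate path is not assumed here):
///
/// ```ignore
/// // 2.5 seconds.
/// let ts = Timespec::new().sec(2).nsec(500_000_000);
/// ```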
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Timespec(pub(crate) sys::Timespec);

impl Default for Timespec {
    fn default() -> Self {
        Self::new()
    }
}

impl Timespec {
    #[inline]
    pub const fn new() -> Self {
        Timespec(sys::Timespec {
            tv_sec: 0,
            tv_nsec: 0,
        })
    }

    #[inline]
    pub const fn sec(mut self, sec: u64) -> Self {
        self.0.tv_sec = sec as _;
        self
    }

    #[inline]
    pub const fn nsec(mut self, nsec: u32) -> Self {
        self.0.tv_nsec = nsec as _;
        self
    }
}

#[cfg(feature = "std")]
impl From<std::time::Duration> for Timespec {
    fn from(value: std::time::Duration) -> Self {
        Timespec::new()
            .sec(value.as_secs())
            .nsec(value.subsec_nanos())
    }
}

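/// Extended arguments for submit-and-wait, passed to `io_uring_enter(2)`
/// via `IORING_ENTER_EXT_ARG`: an optional signal mask and an optional
/// timeout. The `'prev`/`'now` lifetimes keep both the previously and the
/// most recently supplied pointer borrows alive for as long as the kernel
/// may read them.
///
/// A minimal sketch (`ignore`d doctest; crate path and the name of the
/// submit-with-args entry point are not assumed here):
///
/// ```ignore
/// let ts = Timespec::new().sec(1);
/// let args = SubmitArgs::new().timespec(&ts);
/// // `args` can now be handed to a submit_with_args-style call.
/// ```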
#[derive(Default, Debug, Clone, Copy)]
pub struct SubmitArgs<'prev: 'now, 'now> {
    pub(crate) args: sys::io_uring_getevents_arg,
    prev: PhantomData<&'prev ()>,
    now: PhantomData<&'now ()>,
}

impl<'prev, 'now> SubmitArgs<'prev, 'now> {
    #[inline]
    pub const fn new() -> SubmitArgs<'static, 'static> {
        let args = sys::io_uring_getevents_arg {
            sigmask: sys::io_uring_ptr::null(),
            sigmask_sz: 0,
            min_wait_usec: 0,
            ts: sys::io_uring_ptr::null(),
        };

        SubmitArgs {
            args,
            prev: PhantomData,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn sigmask<'new>(mut self, sigmask: &'new sys::KernelSigSet) -> SubmitArgs<'now, 'new> {
        self.args.sigmask = sys::io_uring_ptr::new(cast_ptr(sigmask) as _);
        self.args.sigmask_sz = core::mem::size_of::<sys::KernelSigSet>() as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn timespec<'new>(mut self, timespec: &'new Timespec) -> SubmitArgs<'now, 'new> {
        self.args.ts = sys::io_uring_ptr::new(cast_ptr(timespec) as _);

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }
}

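/// An entry in a provided-buffer ring, mirroring `struct io_uring_buf`.
///
/// A sketch of filling one entry in place; the helper `fill`, the pointer
/// `buf_ptr`, and the length/ID values are all hypothetical and
/// caller-owned (`ignore`d doctest):
///
/// ```ignore
/// fn fill(entry: &mut BufRingEntry, buf_ptr: *mut core::ffi::c_void) {
///     entry.set_addr(buf_ptr);
///     entry.set_len(4096);
///     entry.set_bid(7);
/// }
/// ```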
#[repr(transparent)]
pub struct BufRingEntry(sys::io_uring_buf);

#[allow(clippy::len_without_is_empty)]
impl BufRingEntry {
    /// Sets the entry's buffer address.
    pub fn set_addr(&mut self, addr: *mut core::ffi::c_void) {
        self.0.addr.ptr = addr;
    }

    /// Returns the entry's buffer address.
    pub fn addr(&self) -> *mut core::ffi::c_void {
        self.0.addr.ptr
    }

    /// Sets the entry's buffer length.
    pub fn set_len(&mut self, len: u32) {
        self.0.len = len;
    }

    /// Returns the entry's buffer length.
    pub fn len(&self) -> u32 {
        self.0.len
    }

    /// Sets the entry's buffer ID.
    pub fn set_bid(&mut self, bid: u16) {
        self.0.bid = bid;
    }

    /// Returns the entry's buffer ID.
    pub fn bid(&self) -> u16 {
        self.0.bid
    }

    /// Returns a pointer to the ring's tail field, which shares storage
    /// with the reserved field of the ring's first entry.
    ///
    /// # Safety
    ///
    /// `ring_base` must point to the base of a valid buffer ring, i.e.
    /// the location of the ring's first entry.
    pub unsafe fn tail(ring_base: *const BufRingEntry) -> *const u16 {
        core::ptr::addr_of!(
            (*ring_base.cast::<sys::io_uring_buf_ring>())
                .tail_or_bufs
                .tail
                .as_ref()
                .tail
        )
    }
}

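/// A destination slot for operations that instantiate fixed resources,
/// such as accepting or opening directly into the fixed-file table:
/// either a specific slot index or kernel-chosen automatic allocation.
///
/// A minimal sketch (`ignore`d doctest; crate path not assumed):
///
/// ```ignore
/// // Install into slot 0, or let the kernel pick a free slot:
/// let explicit = DestinationSlot::try_from_slot_target(0).unwrap();
/// let auto = DestinationSlot::auto_target();
/// ```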
#[derive(Debug, Clone, Copy)]
pub struct DestinationSlot {
    /// Fixed-file slot as indexed by the kernel (`target + 1`).
    dest: NonZeroU32,
}

impl DestinationSlot {
    const AUTO_ALLOC: NonZeroU32 =
        unwrap_nonzero(NonZeroU32::new(sys::IORING_FILE_INDEX_ALLOC as u32));

    /// Use an automatically allocated target slot.
    pub const fn auto_target() -> Self {
        Self {
            dest: DestinationSlot::AUTO_ALLOC,
        }
    }

    /// Try to use the given target slot, returning the rejected value if
    /// it is out of range.
    pub fn try_from_slot_target(target: u32) -> Result<Self, u32> {
        // The kernel encoding is 1-based, and `AUTO_ALLOC` is reserved.
        const MAX_INDEX: u32 = unwrap_u32(DestinationSlot::AUTO_ALLOC.get().checked_sub(2));

        if target > MAX_INDEX {
            return Err(target);
        }

        // By construction, `kernel_index` is clamped between 1 and
        // `AUTO_ALLOC - 1`, so it is always non-zero.
        let kernel_index = target.saturating_add(1);
        debug_assert!(0 < kernel_index && kernel_index < DestinationSlot::AUTO_ALLOC.get());
        let dest = NonZeroU32::new(kernel_index).unwrap();

        Ok(Self { dest })
    }

    pub(crate) fn kernel_index_arg(&self) -> u32 {
        self.dest.get()
    }
}

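/// A helper for parsing the buffer filled by a multishot recvmsg
/// completion, as laid out by the kernel: an `io_uring_recvmsg_out`
/// header followed by the name, control, and payload regions.
///
/// A sketch of typical use; `buf`, `msghdr`, and `handle` are hypothetical
/// caller-owned values (`ignore`d doctest):
///
/// ```ignore
/// let out = RecvMsgOut::parse(buf, &msghdr).expect("short or corrupt buffer");
/// if !out.is_payload_truncated() {
///     handle(out.payload_data());
/// }
/// ```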
#[derive(Debug)]
pub struct RecvMsgOut<'buf> {
    header: sys::io_uring_recvmsg_out,
    /// The fixed length of the name field, in bytes.
    ///
    /// If the incoming name data is larger than this, it gets truncated;
    /// if it is smaller, the field is zero-padded. Either way, this fixed
    /// amount of space is reserved in the result buffer.
    msghdr_name_len: usize,

    name_data: &'buf [u8],
    control_data: &'buf [u8],
    payload_data: &'buf [u8],
}

impl<'buf> RecvMsgOut<'buf> {
    const DATA_START: usize = core::mem::size_of::<sys::io_uring_recvmsg_out>();

    /// Parses `buffer` as filled by a multishot recvmsg completion.
    /// `msghdr` must be the same message header that was submitted with
    /// the original operation. Returns `Err(())` if the buffer is too
    /// short to hold the advertised header regions.
    #[allow(clippy::result_unit_err)]
    #[allow(clippy::useless_conversion)]
    pub fn parse(buffer: &'buf [u8], msghdr: &MsgHdr) -> Result<Self, ()> {
        let msghdr_name_len = usize::try_from(msghdr.msg_namelen).unwrap();
        let msghdr_control_len = usize::try_from(msghdr.msg_controllen).unwrap();

        if Self::DATA_START
            .checked_add(msghdr_name_len)
            .and_then(|acc| acc.checked_add(msghdr_control_len))
            .map(|header_len| buffer.len() < header_len)
            .unwrap_or(true)
        {
            return Err(());
        }
        // The buffer may be misaligned for `io_uring_recvmsg_out`, so
        // read the header unaligned.
        let header = unsafe {
            buffer
                .as_ptr()
                .cast::<sys::io_uring_recvmsg_out>()
                .read_unaligned()
        };

        // The name, control, and payload regions are laid out back to
        // back after the header; each data slice is capped at both the
        // kernel-reported length and the reserved field size.
        let (name_data, control_start) = {
            let name_start = Self::DATA_START;
            let name_data_end =
                name_start + usize::min(usize::try_from(header.namelen).unwrap(), msghdr_name_len);
            let name_field_end = name_start + msghdr_name_len;
            (&buffer[name_start..name_data_end], name_field_end)
        };
        let (control_data, payload_start) = {
            let control_data_end = control_start
                + usize::min(
                    usize::try_from(header.controllen).unwrap(),
                    msghdr_control_len,
                );
            let control_field_end = control_start + msghdr_control_len;
            (&buffer[control_start..control_data_end], control_field_end)
        };
        let payload_data = {
            let payload_data_end = payload_start
                + usize::min(
                    usize::try_from(header.payloadlen).unwrap(),
                    buffer.len() - payload_start,
                );
            &buffer[payload_start..payload_data_end]
        };

        Ok(Self {
            header,
            msghdr_name_len,
            name_data,
            control_data,
            payload_data,
        })
    }

    /// Returns the length of the incoming `name` data, which may exceed
    /// `name_data().len()` if the incoming data was truncated.
    pub fn incoming_name_len(&self) -> u32 {
        self.header.namelen
    }

    /// Returns whether the incoming name data was larger than the
    /// reserved name field.
    pub fn is_name_data_truncated(&self) -> bool {
        self.header.namelen as usize > self.msghdr_name_len
    }

    /// Returns the message's (possibly truncated) name data.
    pub fn name_data(&self) -> &[u8] {
        self.name_data
    }

    /// Returns the length of the incoming control data, which may exceed
    /// `control_data().len()` if the incoming data was truncated.
    pub fn incoming_control_len(&self) -> u32 {
        self.header.controllen
    }

    /// Returns whether the control data was truncated (`MSG_CTRUNC`).
    pub fn is_control_data_truncated(&self) -> bool {
        self.header.flags.contains(sys::RecvmsgOutFlags::CTRUNC)
    }

    /// Returns the message's (possibly truncated) control data.
    pub fn control_data(&self) -> &[u8] {
        self.control_data
    }

    /// Returns whether the payload was truncated (`MSG_TRUNC`).
    pub fn is_payload_truncated(&self) -> bool {
        self.header.flags.contains(sys::RecvmsgOutFlags::TRUNC)
    }

    /// Returns the message's (possibly truncated) payload data.
    pub fn payload_data(&self) -> &[u8] {
        self.payload_data
    }

    /// Returns the length of the incoming payload, which may exceed
    /// `payload_data().len()` if the incoming data was truncated.
    pub fn incoming_payload_len(&self) -> u32 {
        self.header.payloadlen
    }

    /// Returns the message flags reported by the kernel.
    pub fn flags(&self) -> sys::RecvmsgOutFlags {
        self.header.flags
    }
}

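/// A builder for the matching criteria of an async cancel operation:
/// cancel by user_data, by file descriptor, or match any request, with
/// [`CancelBuilder::all`] extending a match to every matching request
/// instead of only the first.
///
/// A minimal sketch (`ignore`d doctest; crate path not assumed):
///
/// ```ignore
/// // Cancel every in-flight request whose user_data is 42:
/// let criteria = CancelBuilder::user_data(42).all();
/// ```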
#[derive(Debug)]
pub struct CancelBuilder {
    pub(crate) flags: AsyncCancelFlags,
    pub(crate) user_data: sys::io_uring_user_data,
    pub(crate) fd: Option<sealed::Target>,
}

impl CancelBuilder {
    /// Match any in-flight request in the ring.
    pub const fn any() -> Self {
        Self {
            flags: AsyncCancelFlags::ANY,
            user_data: sys::io_uring_user_data::zeroed(),
            fd: None,
        }
    }

    /// Match requests with the given `user_data` value.
    pub fn user_data(user_data: impl Into<sys::io_uring_user_data>) -> Self {
        Self {
            flags: AsyncCancelFlags::empty(),
            user_data: user_data.into(),
            fd: None,
        }
    }

    /// Match requests whose `user_data` was set from the given `u64`.
    pub const fn user_data_u64(u64_: u64) -> Self {
        Self {
            flags: AsyncCancelFlags::empty(),
            user_data: sys::io_uring_user_data::from_u64(u64_),
            fd: None,
        }
    }

    /// Match requests whose `user_data` was set from the given pointer.
    pub const fn user_data_ptr(ptr: *mut core::ffi::c_void) -> Self {
        Self {
            flags: AsyncCancelFlags::empty(),
            user_data: sys::io_uring_user_data::from_ptr(ptr),
            fd: None,
        }
    }

    /// Match requests on the given raw or fixed file descriptor; a fixed
    /// descriptor additionally sets the `FD_FIXED` flag.
    pub fn fd(fd: impl sealed::UseFixed) -> Self {
        let mut flags = AsyncCancelFlags::FD;
        let target = fd.into();
        if matches!(target, sealed::Target::Fixed(_)) {
            flags.insert(AsyncCancelFlags::FD_FIXED);
        }
        Self {
            flags,
            user_data: sys::io_uring_user_data::default(),
            fd: Some(target),
        }
    }

    /// Extend the match to all requests satisfying the criteria, rather
    /// than just the first one.
    pub fn all(mut self) -> Self {
        self.flags.insert(AsyncCancelFlags::ALL);
        self
    }

    pub(crate) fn to_fd(&self) -> i32 {
        self.fd
            .as_ref()
            .map(|target| match *target {
                sealed::Target::Fd(fd) => fd,
                sealed::Target::Fixed(idx) => idx as i32,
            })
            .unwrap_or(-1)
    }
}

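/// A single wait entry for futex wait-vector operations, wrapping the
/// kernel's `futex_waitv`-style structure.
///
/// A sketch of filling one entry; `futex_word_ptr` and the expected value
/// are hypothetical caller-owned data, and any required size/private
/// flags are elided since their binding names are not assumed here
/// (`ignore`d doctest):
///
/// ```ignore
/// let wait = FutexWaitV::new().uaddr(futex_word_ptr).val(0);
/// ```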
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct FutexWaitV(sys::FutexWait);

impl FutexWaitV {
    pub const fn new() -> Self {
        Self(sys::FutexWait::new())
    }

    pub const fn val(mut self, val: u64) -> Self {
        self.0.val = val;
        self
    }

    pub const fn uaddr(mut self, uaddr: *mut core::ffi::c_void) -> Self {
        self.0.uaddr = sys::FutexWaitPtr::new(uaddr);
        self
    }

    pub const fn flags(mut self, flags: sys::FutexWaitFlags) -> Self {
        self.0.flags = flags;
        self
    }
}

#[cfg(test)]
mod tests {
    use std::time::Duration;

    use crate::types::sealed::Target;

    use super::*;

    #[test]
    fn timespec_from_duration_converts_correctly() {
        let duration = Duration::new(2, 500);
        let timespec = Timespec::from(duration);

        assert_eq!(timespec.0.tv_sec as u64, duration.as_secs());
        assert_eq!(timespec.0.tv_nsec as u32, duration.subsec_nanos());
    }
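
    #[test]
    fn destination_slot_target_range() {
        // A sketch of the slot encoding: user target N maps to kernel
        // index N + 1, and targets above `AUTO_ALLOC - 2` are rejected,
        // so `u32::MAX` can never be a valid explicit slot.
        let slot = DestinationSlot::try_from_slot_target(0).unwrap();
        assert_eq!(slot.kernel_index_arg(), 1);
        assert!(DestinationSlot::try_from_slot_target(u32::MAX).is_err());
    }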

    #[test]
    fn test_cancel_builder_flags() {
        let cb = CancelBuilder::any();
        assert_eq!(cb.flags, AsyncCancelFlags::ANY);

        let mut cb = CancelBuilder::user_data(42);
        assert_eq!(cb.flags, AsyncCancelFlags::empty());
        assert_eq!(cb.user_data, sys::io_uring_user_data::from_u64(42));
        assert!(cb.fd.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fd(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD);
        assert!(matches!(cb.fd, Some(Target::Fd(42))));
        assert_eq!(cb.user_data, Default::default());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fixed(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED);
        assert!(matches!(cb.fd, Some(Target::Fixed(42))));
        assert_eq!(cb.user_data, Default::default());
        cb = cb.all();
        assert_eq!(
            cb.flags,
            AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED | AsyncCancelFlags::ALL
        );
    }
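
    #[test]
    fn cancel_builder_to_fd_defaults_to_minus_one() {
        // With no fd criterion, the kernel-facing fd argument is -1;
        // otherwise it carries the raw fd or the fixed slot index.
        assert_eq!(CancelBuilder::any().to_fd(), -1);
        assert_eq!(CancelBuilder::user_data(7).to_fd(), -1);
        assert_eq!(CancelBuilder::fd(Fd(3)).to_fd(), 3);
        assert_eq!(CancelBuilder::fd(Fixed(5)).to_fd(), 5);
    }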
}