pub(crate) mod sealed {
    use super::{Fd, Fixed};
    use std::os::unix::io::RawFd;

    #[derive(Debug)]
    pub enum Target {
        Fd(RawFd),
        Fixed(u32),
    }

    pub trait UseFd: Sized {
        fn into(self) -> RawFd;
    }

    pub trait UseFixed: Sized {
        fn into(self) -> Target;
    }

    impl UseFd for Fd {
        #[inline]
        fn into(self) -> RawFd {
            self.0
        }
    }

    impl UseFixed for Fd {
        #[inline]
        fn into(self) -> Target {
            Target::Fd(self.0)
        }
    }

    impl UseFixed for Fixed {
        #[inline]
        fn into(self) -> Target {
            Target::Fixed(self.0)
        }
    }
}

use crate::sys;
use crate::util::{cast_ptr, unwrap_nonzero, unwrap_u32};
use bitflags::bitflags;
use std::convert::TryFrom;
use std::marker::PhantomData;
use std::num::NonZeroU32;
use std::os::unix::io::RawFd;

pub use sys::__kernel_rwf_t as RwFlags;
pub use sys::{
    io_uring_region_desc, io_uring_zcrx_area_reg, io_uring_zcrx_cqe, io_uring_zcrx_ifq_reg,
    io_uring_zcrx_rqe, IORING_MEM_REGION_TYPE_USER, IORING_ZCRX_AREA_SHIFT, IOU_PBUF_RING_INC,
    IOU_PBUF_RING_MMAP,
};

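/// Mask selecting the area-identifier bits of a zero-copy receive offset,
/// i.e. every bit at or above `IORING_ZCRX_AREA_SHIFT`.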
pub const IORING_ZCRX_AREA_MASK: u64 = !((1u64 << IORING_ZCRX_AREA_SHIFT) - 1);

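/// Opaque placeholder for the kernel's `statx` structure; values of this type
/// are only ever handled behind raw pointers.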
#[repr(C)]
#[allow(non_camel_case_types)]
pub struct statx {
    _priv: (),
}

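/// Opaque placeholder for the kernel's `epoll_event` structure; values of this
/// type are only ever handled behind raw pointers.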
#[repr(C)]
#[allow(non_camel_case_types)]
pub struct epoll_event {
    _priv: (),
}

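/// A raw file descriptor, passed to the kernel as-is.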
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fd(pub RawFd);

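/// An index into the ring's registered (fixed) file table, used in place of a
/// raw file descriptor.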
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fixed(pub u32);

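/// Flags configuring timeout operations (`IORING_TIMEOUT_*` and
/// `IORING_LINK_TIMEOUT_UPDATE`).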
bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct TimeoutFlags: u32 {
        const ABS = sys::IORING_TIMEOUT_ABS;

        const BOOTTIME = sys::IORING_TIMEOUT_BOOTTIME;

        const REALTIME = sys::IORING_TIMEOUT_REALTIME;

        const LINK_TIMEOUT_UPDATE = sys::IORING_LINK_TIMEOUT_UPDATE;

        const ETIME_SUCCESS = sys::IORING_TIMEOUT_ETIME_SUCCESS;

        const MULTISHOT = sys::IORING_TIMEOUT_MULTISHOT;
    }
}

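/// Flags for fsync operations; `DATASYNC` requests an `fdatasync(2)`-style
/// sync that skips non-essential metadata.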
bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct FsyncFlags: u32 {
        const DATASYNC = sys::IORING_FSYNC_DATASYNC;
    }
}

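/// Flags describing how an asynchronous cancel request matches in-flight
/// operations: by `user_data`, by file descriptor (optionally a fixed-file
/// index), any request, or all matching requests rather than only the first.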
bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub(crate) struct AsyncCancelFlags: u32 {
        const ALL = sys::IORING_ASYNC_CANCEL_ALL;

        const FD = sys::IORING_ASYNC_CANCEL_FD;

        const ANY = sys::IORING_ASYNC_CANCEL_ANY;

        const FD_FIXED = sys::IORING_ASYNC_CANCEL_FD_FIXED;
    }
}

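/// Builder for the `open_how` structure used by `openat2(2)`-style open
/// operations, carrying `flags`, `mode`, and `resolve` fields.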
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct OpenHow(sys::open_how);

impl OpenHow {
    pub const fn new() -> Self {
        OpenHow(sys::open_how {
            flags: 0,
            mode: 0,
            resolve: 0,
        })
    }

    pub const fn flags(mut self, flags: u64) -> Self {
        self.0.flags = flags;
        self
    }

    pub const fn mode(mut self, mode: u64) -> Self {
        self.0.mode = mode;
        self
    }

    pub const fn resolve(mut self, resolve: u64) -> Self {
        self.0.resolve = resolve;
        self
    }
}

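/// A wrapper around `__kernel_timespec`, used to express timeouts.
///
/// Illustrative construction with the builder methods defined below:
/// `Timespec::new().sec(2).nsec(500_000_000)`.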
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Timespec(pub(crate) sys::__kernel_timespec);

impl Timespec {
    #[inline]
    pub const fn new() -> Self {
        Timespec(sys::__kernel_timespec {
            tv_sec: 0,
            tv_nsec: 0,
        })
    }

    #[inline]
    pub const fn sec(mut self, sec: u64) -> Self {
        self.0.tv_sec = sec as _;
        self
    }

    #[inline]
    pub const fn nsec(mut self, nsec: u32) -> Self {
        self.0.tv_nsec = nsec as _;
        self
    }
}

impl From<std::time::Duration> for Timespec {
    fn from(value: std::time::Duration) -> Self {
        Timespec::new()
            .sec(value.as_secs())
            .nsec(value.subsec_nanos())
    }
}

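/// Extra arguments for submit-and-wait calls, wrapping
/// `io_uring_getevents_arg`.
///
/// The `'prev`/`'now` lifetimes track the most recently supplied signal mask
/// and timespec, keeping the data referenced by the raw pointers stored in
/// `args` alive for as long as this value is used.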
#[derive(Default, Debug, Clone, Copy)]
pub struct SubmitArgs<'prev: 'now, 'now> {
    pub(crate) args: sys::io_uring_getevents_arg,
    prev: PhantomData<&'prev ()>,
    now: PhantomData<&'now ()>,
}

impl<'prev, 'now> SubmitArgs<'prev, 'now> {
    #[inline]
    pub const fn new() -> SubmitArgs<'static, 'static> {
        let args = sys::io_uring_getevents_arg {
            sigmask: 0,
            sigmask_sz: 0,
            min_wait_usec: 0,
            ts: 0,
        };

        SubmitArgs {
            args,
            prev: PhantomData,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn sigmask<'new>(mut self, sigmask: &'new libc::sigset_t) -> SubmitArgs<'now, 'new> {
        self.args.sigmask = cast_ptr(sigmask) as _;
        self.args.sigmask_sz = std::mem::size_of::<libc::sigset_t>() as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn timespec<'new>(mut self, timespec: &'new Timespec) -> SubmitArgs<'now, 'new> {
        self.args.ts = cast_ptr(timespec) as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }
}

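/// An entry in a provided-buffer ring, wrapping `io_uring_buf`: the address,
/// length, and buffer id of one buffer handed to the kernel.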
#[repr(transparent)]
pub struct BufRingEntry(sys::io_uring_buf);

#[allow(clippy::len_without_is_empty)]
impl BufRingEntry {
    pub fn set_addr(&mut self, addr: u64) {
        self.0.addr = addr;
    }

    pub fn addr(&self) -> u64 {
        self.0.addr
    }

    pub fn set_len(&mut self, len: u32) {
        self.0.len = len;
    }

    pub fn len(&self) -> u32 {
        self.0.len
    }

    pub fn set_bid(&mut self, bid: u16) {
        self.0.bid = bid;
    }

    pub fn bid(&self) -> u16 {
        self.0.bid
    }

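    /// Returns a pointer to the ring's tail counter, which the kernel ABI
    /// stores in the `resv` field of the first entry of the ring.
    ///
    /// # Safety
    ///
    /// `ring_base` must point to the first entry of a valid buffer ring.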
    pub unsafe fn tail(ring_base: *const BufRingEntry) -> *const u16 {
        std::ptr::addr_of!((*ring_base).0.resv)
    }
}

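/// A destination slot in the ring's fixed-file table, for operations that can
/// install the file descriptor they produce directly into a registered slot.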
#[derive(Debug, Clone, Copy)]
pub struct DestinationSlot {
    dest: NonZeroU32,
}

impl DestinationSlot {
    const AUTO_ALLOC: NonZeroU32 =
        unwrap_nonzero(NonZeroU32::new(sys::IORING_FILE_INDEX_ALLOC as u32));

    pub const fn auto_target() -> Self {
        Self {
            dest: DestinationSlot::AUTO_ALLOC,
        }
    }

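    /// Tries to use `target` as an explicit slot index, returning the rejected
    /// value if it is out of range. The stored value is `target + 1`, since
    /// the kernel encodes slot N as N + 1 and reserves 0 for "no fixed slot".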
    pub fn try_from_slot_target(target: u32) -> Result<Self, u32> {
        const MAX_INDEX: u32 = unwrap_u32(DestinationSlot::AUTO_ALLOC.get().checked_sub(2));

        if target > MAX_INDEX {
            return Err(target);
        }

        let kernel_index = target.saturating_add(1);
        debug_assert!(0 < kernel_index && kernel_index < DestinationSlot::AUTO_ALLOC.get());
        let dest = NonZeroU32::new(kernel_index).unwrap();

        Ok(Self { dest })
    }

    pub(crate) fn kernel_index_arg(&self) -> u32 {
        self.dest.get()
    }
}

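/// Borrowed views into a buffer filled by a recvmsg-style operation: the
/// `io_uring_recvmsg_out` header followed by the name, control, and payload
/// sections.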
#[derive(Debug)]
pub struct RecvMsgOut<'buf> {
    header: sys::io_uring_recvmsg_out,
    msghdr_name_len: usize,

    name_data: &'buf [u8],
    control_data: &'buf [u8],
    payload_data: &'buf [u8],
}

impl<'buf> RecvMsgOut<'buf> {
    const DATA_START: usize = std::mem::size_of::<sys::io_uring_recvmsg_out>();

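    /// Parses `buffer`, as filled by the kernel for a recvmsg operation that
    /// was set up with `msghdr`, into its header plus name, control, and
    /// payload sections. Returns `Err(())` if the buffer is too small for the
    /// lengths described by `msghdr`.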
    #[allow(clippy::result_unit_err)]
    #[allow(clippy::useless_conversion)]
    pub fn parse(buffer: &'buf [u8], msghdr: &libc::msghdr) -> Result<Self, ()> {
        let msghdr_name_len = usize::try_from(msghdr.msg_namelen).unwrap();
        let msghdr_control_len = usize::try_from(msghdr.msg_controllen).unwrap();

        if Self::DATA_START
            .checked_add(msghdr_name_len)
            .and_then(|acc| acc.checked_add(msghdr_control_len))
            .map(|header_len| buffer.len() < header_len)
            .unwrap_or(true)
        {
            return Err(());
        }
        let header = unsafe {
            buffer
                .as_ptr()
                .cast::<sys::io_uring_recvmsg_out>()
                .read_unaligned()
        };

        let (name_data, control_start) = {
            let name_start = Self::DATA_START;
            let name_data_end =
                name_start + usize::min(usize::try_from(header.namelen).unwrap(), msghdr_name_len);
            let name_field_end = name_start + msghdr_name_len;
            (&buffer[name_start..name_data_end], name_field_end)
        };
        let (control_data, payload_start) = {
            let control_data_end = control_start
                + usize::min(
                    usize::try_from(header.controllen).unwrap(),
                    msghdr_control_len,
                );
            let control_field_end = control_start + msghdr_control_len;
            (&buffer[control_start..control_data_end], control_field_end)
        };
        let payload_data = {
            let payload_data_end = payload_start
                + usize::min(
                    usize::try_from(header.payloadlen).unwrap(),
                    buffer.len() - payload_start,
                );
            &buffer[payload_start..payload_data_end]
        };

        Ok(Self {
            header,
            msghdr_name_len,
            name_data,
            control_data,
            payload_data,
        })
    }

    pub fn incoming_name_len(&self) -> u32 {
        self.header.namelen
    }

    pub fn is_name_data_truncated(&self) -> bool {
        self.header.namelen as usize > self.msghdr_name_len
    }

    pub fn name_data(&self) -> &[u8] {
        self.name_data
    }

    pub fn incoming_control_len(&self) -> u32 {
        self.header.controllen
    }

    pub fn is_control_data_truncated(&self) -> bool {
        (self.header.flags & u32::try_from(libc::MSG_CTRUNC).unwrap()) != 0
    }

    pub fn control_data(&self) -> &[u8] {
        self.control_data
    }

    pub fn is_payload_truncated(&self) -> bool {
        (self.header.flags & u32::try_from(libc::MSG_TRUNC).unwrap()) != 0
    }

    pub fn payload_data(&self) -> &[u8] {
        self.payload_data
    }

    pub fn incoming_payload_len(&self) -> u32 {
        self.header.payloadlen
    }

    pub fn flags(&self) -> u32 {
        self.header.flags
    }
}

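/// Criteria for matching in-flight operations when building a cancellation
/// request: match any request, match by `user_data`, or match by file
/// descriptor (raw or fixed). `all()` widens the match from the first hit to
/// every matching operation.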
#[derive(Debug)]
pub struct CancelBuilder {
    pub(crate) flags: AsyncCancelFlags,
    pub(crate) user_data: Option<u64>,
    pub(crate) fd: Option<sealed::Target>,
}

impl CancelBuilder {
    pub const fn any() -> Self {
        Self {
            flags: AsyncCancelFlags::ANY,
            user_data: None,
            fd: None,
        }
    }

    pub const fn user_data(user_data: u64) -> Self {
        Self {
            flags: AsyncCancelFlags::empty(),
            user_data: Some(user_data),
            fd: None,
        }
    }

    pub fn fd(fd: impl sealed::UseFixed) -> Self {
        let mut flags = AsyncCancelFlags::FD;
        let target = fd.into();
        if matches!(target, sealed::Target::Fixed(_)) {
            flags.insert(AsyncCancelFlags::FD_FIXED);
        }
        Self {
            flags,
            user_data: None,
            fd: Some(target),
        }
    }

    pub fn all(mut self) -> Self {
        self.flags.insert(AsyncCancelFlags::ALL);
        self
    }

    pub(crate) fn to_fd(&self) -> i32 {
        self.fd
            .as_ref()
            .map(|target| match *target {
                sealed::Target::Fd(fd) => fd,
                sealed::Target::Fixed(idx) => idx as i32,
            })
            .unwrap_or(-1)
    }
}

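/// Builder for a `futex_waitv` entry as used by vectored futex wait
/// operations, carrying the expected value, user-space address, and flags.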
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct FutexWaitV(sys::futex_waitv);

impl FutexWaitV {
    pub const fn new() -> Self {
        Self(sys::futex_waitv {
            val: 0,
            uaddr: 0,
            flags: 0,
            __reserved: 0,
        })
    }

    pub const fn val(mut self, val: u64) -> Self {
        self.0.val = val;
        self
    }

    pub const fn uaddr(mut self, uaddr: u64) -> Self {
        self.0.uaddr = uaddr;
        self
    }

    pub const fn flags(mut self, flags: u32) -> Self {
        self.0.flags = flags;
        self
    }
}

#[cfg(test)]
mod tests {
    use std::time::Duration;

    use crate::types::sealed::Target;

    use super::*;

    #[test]
    fn timespec_from_duration_converts_correctly() {
        let duration = Duration::new(2, 500);
        let timespec = Timespec::from(duration);

        assert_eq!(timespec.0.tv_sec as u64, duration.as_secs());
        assert_eq!(timespec.0.tv_nsec as u32, duration.subsec_nanos());
    }

    #[test]
    fn test_cancel_builder_flags() {
        let cb = CancelBuilder::any();
        assert_eq!(cb.flags, AsyncCancelFlags::ANY);

        let mut cb = CancelBuilder::user_data(42);
        assert_eq!(cb.flags, AsyncCancelFlags::empty());
        assert_eq!(cb.user_data, Some(42));
        assert!(cb.fd.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fd(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD);
        assert!(matches!(cb.fd, Some(Target::Fd(42))));
        assert!(cb.user_data.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fixed(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED);
        assert!(matches!(cb.fd, Some(Target::Fixed(42))));
        assert!(cb.user_data.is_none());
        cb = cb.all();
        assert_eq!(
            cb.flags,
            AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED | AsyncCancelFlags::ALL
        );
    }
}