use std::fmt::{self, Debug};
use std::mem::size_of;
use std::ops::Deref;

use vm_memory::bitmap::{BitmapSlice, WithBitmapSlice};
use vm_memory::{Address, Bytes, GuestAddress, GuestMemory, GuestMemoryRegion};

use crate::{desc::split::Descriptor, Error, Reader, Writer};
use virtio_bindings::bindings::virtio_ring::VRING_DESC_ALIGN_SIZE;

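/// A chain of descriptors from a split virtqueue, exposed as an iterator over the
/// individual [`Descriptor`] entries. The iterator follows `next` links and
/// transparently descends into indirect descriptor tables.
///
/// A minimal usage sketch, modeled on the mock queue used by the tests at the bottom
/// of this file (marked `ignore` because it relies on test-only helpers):
///
/// ```ignore
/// let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
/// let vq = MockSplitQueue::new(m, 16);
/// // ... populate vq.desc_table() with descriptors ...
/// let chain = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
/// for desc in chain {
///     // Each `desc` is one entry of the chain, in guest-specified order.
/// }
/// ```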
#[derive(Clone, Debug)]
pub struct DescriptorChain<M> {
    mem: M,
    desc_table: GuestAddress,
    queue_size: u16,
    head_index: u16,
    next_index: u16,
    ttl: u16,
    yielded_bytes: u32,
    is_indirect: bool,
}

impl<M> DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
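    /// Create a `DescriptorChain` with an explicit `ttl`, i.e. the maximum number of
    /// descriptors the chain may still yield before iteration stops.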
    fn with_ttl(
        mem: M,
        desc_table: GuestAddress,
        queue_size: u16,
        ttl: u16,
        head_index: u16,
    ) -> Self {
        DescriptorChain {
            mem,
            desc_table,
            queue_size,
            head_index,
            next_index: head_index,
            ttl,
            is_indirect: false,
            yielded_bytes: 0,
        }
    }

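    /// Create a new `DescriptorChain` starting at `head_index`, with a `ttl` equal to the
    /// full queue size.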
    pub(crate) fn new(mem: M, desc_table: GuestAddress, queue_size: u16, head_index: u16) -> Self {
        Self::with_ttl(mem, desc_table, queue_size, queue_size, head_index)
    }

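    /// Return the index of the chain's head descriptor in the descriptor table.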
    pub fn head_index(&self) -> u16 {
        self.head_index
    }

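    /// Return a reference to the guest memory object associated with the chain.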
    pub fn memory(&self) -> &M::Target {
        self.mem.deref()
    }

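    /// Consume the chain, returning an iterator over only its device-readable
    /// (i.e. not write-only) descriptors.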
    pub fn readable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: false,
        }
    }

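    /// Consume the chain, returning a [`Writer`] over its device-writable buffers, or
    /// `Error::InvalidChain` if the chain cannot be parsed.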
    pub fn writer<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Writer<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Writer::new(mem, self).map_err(|_| Error::InvalidChain)
    }

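    /// Consume the chain, returning a [`Reader`] over its device-readable buffers, or
    /// `Error::InvalidChain` if the chain cannot be parsed.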
    pub fn reader<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Reader<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Reader::new(mem, self).map_err(|_| Error::InvalidChain)
    }

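    /// Consume the chain, returning an iterator over only its device-writable
    /// (write-only) descriptors.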
    pub fn writable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: true,
        }
    }

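    /// Redirect iteration to the indirect descriptor table referenced by `desc`.
    ///
    /// Fails if the chain is already indirect (nesting is not allowed), if the table
    /// length is not a multiple of the descriptor size, or if it has more than
    /// `u16::MAX` entries.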
    fn switch_to_indirect_table(&mut self, desc: Descriptor) -> Result<(), Error> {
        if self.is_indirect {
            return Err(Error::InvalidIndirectDescriptor);
        }

        if desc.len() & (VRING_DESC_ALIGN_SIZE - 1) != 0 {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        let table_len = desc.len() / VRING_DESC_ALIGN_SIZE;
        if table_len > u32::from(u16::MAX) {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        self.desc_table = desc.addr();
        self.queue_size = u16::try_from(table_len).expect("invalid table_len");
        self.next_index = 0;
        self.ttl = self.queue_size;
        self.is_indirect = true;

        Ok(())
    }
}

impl<M> Iterator for DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

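    /// Return the next descriptor in the chain, switching to an indirect table when one
    /// is encountered. Returns `None` once the chain is exhausted or when a malformed
    /// descriptor is read.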
    fn next(&mut self) -> Option<Self::Item> {
        if self.ttl == 0 || self.next_index >= self.queue_size {
            return None;
        }

        let desc_addr = self
            .desc_table
            .checked_add(self.next_index as u64 * size_of::<Descriptor>() as u64)?;

        let desc = self.mem.read_obj::<Descriptor>(desc_addr).ok()?;

        if desc.refers_to_indirect_table() {
            self.switch_to_indirect_table(desc).ok()?;
            return self.next();
        }

        match self.yielded_bytes.checked_add(desc.len()) {
            Some(yielded_bytes) => self.yielded_bytes = yielded_bytes,
            None => return None,
        };

        if desc.has_next() {
            self.next_index = desc.next();
            self.ttl -= 1;
        } else {
            self.ttl = 0;
        }

        Some(desc)
    }
}

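/// An iterator over either the device-readable or the device-writable descriptors of a
/// [`DescriptorChain`], depending on how it was obtained (`readable()` or `writable()`).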
#[derive(Clone)]
pub struct DescriptorChainRwIter<M> {
    chain: DescriptorChain<M>,
    writable: bool,
}

impl<M> Iterator for DescriptorChainRwIter<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

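    /// Return the next descriptor whose write-only flag matches the requested access
    /// mode, skipping descriptors of the other kind.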
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.chain.next() {
                Some(v) => {
                    if v.is_write_only() == self.writable {
                        return Some(v);
                    }
                }
                None => return None,
            }
        }
    }
}

impl<M> Debug for DescriptorChainRwIter<M>
where
    M: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DescriptorChainRwIter")
            .field("chain", &self.chain)
            .field("writable", &self.writable)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::desc::{split::Descriptor as SplitDescriptor, RawDescriptor};
    use crate::mock::{DescriptorTable, MockSplitQueue};
    use virtio_bindings::bindings::virtio_ring::{VRING_DESC_F_INDIRECT, VRING_DESC_F_NEXT};
    use vm_memory::GuestMemoryMmap;

    #[test]
    fn test_checked_new_descriptor_chain() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);

        assert!(vq.end().0 < 0x1000);

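        // A head index equal to the queue size is out of bounds, so the chain yields nothing.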
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 16)
                .next()
                .is_none()
        );

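        // A descriptor table address outside of guest memory also yields nothing.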
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, GuestAddress(0x00ff_ffff_ffff), 16, 0)
                .next()
                .is_none()
        );

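        // A descriptor whose `next` field points past the end of the queue ends the chain.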
        {
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                16,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
            c.next().unwrap();
            assert!(c.next().is_none());
        }

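        // A well-formed two-descriptor chain can be walked to completion.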
        {
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                1,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let desc = RawDescriptor::from(SplitDescriptor::new(0x2000, 0x1000, 0, 0));
            vq.desc_table().store(1, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);

            assert_eq!(
                c.memory() as *const GuestMemoryMmap,
                m as *const GuestMemoryMmap
            );

            assert_eq!(c.desc_table, vq.start());
            assert_eq!(c.queue_size, 16);
            assert_eq!(c.ttl, c.queue_size);

            let desc = c.next().unwrap();
            assert_eq!(desc.addr(), GuestAddress(0x1000));
            assert_eq!(desc.len(), 0x1000);
            assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
            assert_eq!(desc.next(), 1);
            assert_eq!(c.ttl, c.queue_size - 1);

            assert!(c.next().is_some());
            assert_eq!(c.ttl, 0);
            assert!(c.next().is_none());
            assert_eq!(c.ttl, 0);
        }
    }

    #[test]
    fn test_ttl_wrap_around() {
        const QUEUE_SIZE: u16 = 16;

        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x100000)]).unwrap();
        let vq = MockSplitQueue::new(m, QUEUE_SIZE);

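        // Populate the whole descriptor table; only the last entry leaves VRING_DESC_F_NEXT unset.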
        for i in 0..QUEUE_SIZE - 1 {
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000 * (i + 1) as u64,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                i + 1,
            ));
            vq.desc_table().store(i, desc).unwrap();
        }
        let desc = RawDescriptor::from(SplitDescriptor::new((0x1000 * 16) as u64, 0x1000, 0, 0));
        vq.desc_table().store(QUEUE_SIZE - 1, desc).unwrap();

        let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), QUEUE_SIZE, 0);
        assert_eq!(c.ttl, c.queue_size);

        for i in 0..QUEUE_SIZE {
            let _desc = c.next().unwrap();
            assert_eq!(c.ttl, c.queue_size - i - 1);
        }
        assert!(c.next().is_none());
    }

    #[test]
    fn test_new_from_indirect_descriptor() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

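        // Build a chain whose first descriptor is a plain buffer and whose second one
        // points to an indirect table.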
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x6000,
            0x1000,
            VRING_DESC_F_NEXT as u16,
            1,
        ));
        dtable.store(0, desc).unwrap();
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x7000,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        ));
        dtable.store(1, desc).unwrap();
        let desc = RawDescriptor::from(SplitDescriptor::new(0x8000, 0x1000, 0, 0));
        dtable.store(2, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

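        // Create the indirect table at 0x7000 and fill it with a four-descriptor chain.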
        let idtable = DescriptorTable::new(m, GuestAddress(0x7000), 4);
        for i in 0..4u16 {
            let desc: RawDescriptor = if i < 3 {
                RawDescriptor::from(SplitDescriptor::new(
                    0x1000 * i as u64,
                    0x1000,
                    VRING_DESC_F_NEXT as u16,
                    i + 1,
                ))
            } else {
                RawDescriptor::from(SplitDescriptor::new(0x1000 * i as u64, 0x1000, 0, 0))
            };
            idtable.store(i, desc).unwrap();
        }

        assert_eq!(c.head_index(), 0);
        c.next().unwrap();

        assert!(!c.is_indirect);

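        // Iterating further switches to the indirect table and yields its four descriptors,
        // after which the chain ends.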
        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
        assert!(c.next().is_none());
    }

    #[test]
    fn test_indirect_descriptor_address_noaligned() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

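        // Place the indirect descriptor table at an address that is not aligned to the
        // descriptor size; the chain should still parse.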
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x7001,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        ));
        dtable.store(0, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

        let idtable = DescriptorTable::new(m, GuestAddress(0x7001), 4);
        for i in 0..4u16 {
            let desc: RawDescriptor = if i < 3 {
                RawDescriptor::from(SplitDescriptor::new(
                    0x1000 * i as u64,
                    0x1000,
                    VRING_DESC_F_NEXT as u16,
                    i + 1,
                ))
            } else {
                RawDescriptor::from(SplitDescriptor::new(0x1000 * i as u64, 0x1000, 0, 0))
            };
            idtable.store(i, desc).unwrap();
        }

        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
    }

    #[test]
    fn test_indirect_descriptor_err() {
        {
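            // An indirect descriptor whose length is not a multiple of the descriptor size
            // must be rejected.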
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1001,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
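            // An indirect table with more than u16::MAX entries must be rejected.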
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                (u16::MAX as u32 + 1) * VRING_DESC_ALIGN_SIZE,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
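            // An indirect descriptor pointing to a table that holds a plain descriptor
            // parses fine.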
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();
            let desc = RawDescriptor::from(SplitDescriptor::new(0x3000, 0x1000, 0, 0));
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);
            assert!(c.next().is_some());

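            // But if the indirect table's first entry is itself indirect, the nested table
            // must be rejected.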
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x3000,
                0x1000,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }
    }
}