use std::fmt::{self, Debug};
use std::mem::size_of;
use std::ops::Deref;

use vm_memory::bitmap::{BitmapSlice, WithBitmapSlice};
use vm_memory::{Address, Bytes, GuestAddress, GuestMemory, GuestMemoryRegion};

use crate::{desc::split::Descriptor, Error, Reader, Writer};
use virtio_bindings::bindings::virtio_ring::VRING_DESC_ALIGN_SIZE;
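
/// A virtio descriptor chain.
///
/// The chain walks the descriptors that make up a single buffer submitted by the driver,
/// starting from `head_index` in the descriptor table, and handles both direct and
/// indirect descriptors. It is consumed through its `Iterator` implementation.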
#[derive(Clone, Debug)]
pub struct DescriptorChain<M> {
    mem: M,
    desc_table: GuestAddress,
    queue_size: u16,
    head_index: u16,
    next_index: u16,
    /// Number of descriptors the chain may still yield; guards against loops in the table.
    ttl: u16,
    /// Total bytes yielded so far; used to reject chains whose length would overflow a `u32`.
    yielded_bytes: u32,
    is_indirect: bool,
}

impl<M> DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    fn with_ttl(
        mem: M,
        desc_table: GuestAddress,
        queue_size: u16,
        ttl: u16,
        head_index: u16,
    ) -> Self {
        DescriptorChain {
            mem,
            desc_table,
            queue_size,
            head_index,
            next_index: head_index,
            ttl,
            is_indirect: false,
            yielded_bytes: 0,
        }
    }
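
    /// Create a new `DescriptorChain` that starts at the descriptor with index `head_index`
    /// in the descriptor table located at `desc_table` within the guest memory `mem`.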
    pub(crate) fn new(mem: M, desc_table: GuestAddress, queue_size: u16, head_index: u16) -> Self {
        Self::with_ttl(mem, desc_table, queue_size, queue_size, head_index)
    }
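
    /// Get the descriptor index of the chain head.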
    pub fn head_index(&self) -> u16 {
        self.head_index
    }
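
    /// Return a reference to the `GuestMemory` object that can be used to access the buffers
    /// pointed to by the descriptor chain.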
    pub fn memory(&self) -> &M::Target {
        self.mem.deref()
    }
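
    /// Return an iterator that only yields the device-readable descriptors in the chain.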
    pub fn readable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: false,
        }
    }
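
    /// Build a [`Writer`] over the device-writable descriptors of this chain.
    ///
    /// Any failure while parsing the chain is reported as [`Error::InvalidChain`].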
    pub fn writer<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Writer<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Writer::new(mem, self).map_err(|_| Error::InvalidChain)
    }
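
    /// Build a [`Reader`] over the device-readable descriptors of this chain.
    ///
    /// Any failure while parsing the chain is reported as [`Error::InvalidChain`].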
    pub fn reader<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Reader<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Reader::new(mem, self).map_err(|_| Error::InvalidChain)
    }
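
    /// Return an iterator that only yields the device-writable descriptors in the chain.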
    pub fn writable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: true,
        }
    }
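
    // Switch the chain over to the indirect descriptor table referenced by `desc`,
    // after validating that the table is well formed.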
    fn switch_to_indirect_table(&mut self, desc: Descriptor) -> Result<(), Error> {
        // Only one level of indirection is allowed: an indirect descriptor must not be
        // reached from within an indirect table.
        if self.is_indirect {
            return Err(Error::InvalidIndirectDescriptor);
        }

        // The length of the indirect table must be a multiple of the descriptor size.
        if desc.len() & (VRING_DESC_ALIGN_SIZE - 1) != 0 {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        // The table cannot hold more descriptors than fit in a `u16` index.
        let table_len = desc.len() / VRING_DESC_ALIGN_SIZE;
        if table_len > u32::from(u16::MAX) {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        self.desc_table = desc.addr();
        // The conversion cannot fail: `table_len` was checked against `u16::MAX` above.
        self.queue_size = u16::try_from(table_len).expect("invalid table_len");
        self.next_index = 0;
        self.ttl = self.queue_size;
        self.is_indirect = true;

        Ok(())
    }
}

impl<M> Iterator for DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

    fn next(&mut self) -> Option<Self::Item> {
        if self.ttl == 0 || self.next_index >= self.queue_size {
            return None;
        }

        let desc_addr = self
            .desc_table
            // The multiplication cannot overflow a u64: `next_index` is a u16 and the
            // descriptor size is a small constant.
            .checked_add(self.next_index as u64 * size_of::<Descriptor>() as u64)?;

        let desc = self.mem.read_obj::<Descriptor>(desc_addr).ok()?;

        if desc.refers_to_indirect_table() {
            // Switch to the indirect table and restart iteration from its first descriptor.
            self.switch_to_indirect_table(desc).ok()?;
            return self.next();
        }

        // Stop yielding descriptors if the total chain length would overflow a `u32`.
        match self.yielded_bytes.checked_add(desc.len()) {
            Some(yielded_bytes) => self.yielded_bytes = yielded_bytes,
            None => return None,
        };

        if desc.has_next() {
            self.next_index = desc.next();
            // `ttl` is known to be greater than zero here (checked at the top of the method).
            self.ttl -= 1;
        } else {
            self.ttl = 0;
        }

        Some(desc)
    }
}
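
/// An iterator over the readable or writable descriptors of a [`DescriptorChain`].
///
/// Created by [`DescriptorChain::readable`] or [`DescriptorChain::writable`].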
#[derive(Clone)]
pub struct DescriptorChainRwIter<M> {
    chain: DescriptorChain<M>,
    writable: bool,
}

impl<M> Iterator for DescriptorChainRwIter<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

    fn next(&mut self) -> Option<Self::Item> {
        // Walk the underlying chain, yielding only descriptors whose direction matches the
        // requested one (write-only descriptors when `writable` is set, read-only otherwise).
        loop {
            match self.chain.next() {
                Some(v) => {
                    if v.is_write_only() == self.writable {
                        return Some(v);
                    }
                }
                None => return None,
            }
        }
    }
}

impl<M> Debug for DescriptorChainRwIter<M>
where
    M: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DescriptorChainRwIter")
            .field("chain", &self.chain)
            .field("writable", &self.writable)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::desc::{split::Descriptor as SplitDescriptor, RawDescriptor};
    use crate::mock::{DescriptorTable, MockSplitQueue};
    use virtio_bindings::bindings::virtio_ring::{VRING_DESC_F_INDIRECT, VRING_DESC_F_NEXT};
    use vm_memory::GuestMemoryMmap;

    #[test]
    fn test_checked_new_descriptor_chain() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);

        assert!(vq.end().0 < 0x1000);

        // A chain whose head index is past the end of the queue yields nothing.
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 16)
                .next()
                .is_none()
        );

        // A descriptor table address that is not backed by guest memory also yields nothing.
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, GuestAddress(0x00ff_ffff_ffff), 16, 0)
                .next()
                .is_none()
        );

        {
            // The first descriptor is valid, but its `next` index (16) is out of bounds,
            // so iteration stops after one element.
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                16,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
            c.next().unwrap();
            assert!(c.next().is_none());
        }

        {
            // A well-formed two-descriptor chain.
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                1,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let desc = RawDescriptor::from(SplitDescriptor::new(0x2000, 0x1000, 0, 0));
            vq.desc_table().store(1, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);

            assert_eq!(
                c.memory() as *const GuestMemoryMmap,
                m as *const GuestMemoryMmap
            );

            assert_eq!(c.desc_table, vq.start());
            assert_eq!(c.queue_size, 16);
            assert_eq!(c.ttl, c.queue_size);

            let desc = c.next().unwrap();
            assert_eq!(desc.addr(), GuestAddress(0x1000));
            assert_eq!(desc.len(), 0x1000);
            assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
            assert_eq!(desc.next(), 1);
            assert_eq!(c.ttl, c.queue_size - 1);

            assert!(c.next().is_some());
            assert_eq!(c.ttl, 0);
            assert!(c.next().is_none());
            assert_eq!(c.ttl, 0);
        }
    }

    #[test]
    fn test_ttl_wrap_around() {
        const QUEUE_SIZE: u16 = 16;

        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x100000)]).unwrap();
        let vq = MockSplitQueue::new(m, QUEUE_SIZE);

        // Populate the entire descriptor table with a single chain; only the last entry
        // does not have the NEXT flag set.
        for i in 0..QUEUE_SIZE - 1 {
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000 * (i + 1) as u64,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                i + 1,
            ));
            vq.desc_table().store(i, desc).unwrap();
        }
        let desc = RawDescriptor::from(SplitDescriptor::new((0x1000 * 16) as u64, 0x1000, 0, 0));
        vq.desc_table().store(QUEUE_SIZE - 1, desc).unwrap();

        let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), QUEUE_SIZE, 0);
        assert_eq!(c.ttl, c.queue_size);

        // The chain is walkable from start to end; `ttl` decreases by one per descriptor.
        for i in 0..QUEUE_SIZE {
            let _desc = c.next().unwrap();
            assert_eq!(c.ttl, c.queue_size - i - 1);
        }
        assert!(c.next().is_none());
    }

    #[test]
    fn test_new_from_indirect_descriptor() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

        // Build a chain where the second descriptor points to an indirect table.
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x6000,
            0x1000,
            VRING_DESC_F_NEXT as u16,
            1,
        ));
        dtable.store(0, desc).unwrap();
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x7000,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        ));
        dtable.store(1, desc).unwrap();
        let desc = RawDescriptor::from(SplitDescriptor::new(0x8000, 0x1000, 0, 0));
        dtable.store(2, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

        // Create an indirect table with 4 chained descriptors at 0x7000.
        let idtable = DescriptorTable::new(m, GuestAddress(0x7000), 4);
        for i in 0..4u16 {
            let desc: RawDescriptor = if i < 3 {
                RawDescriptor::from(SplitDescriptor::new(
                    0x1000 * i as u64,
                    0x1000,
                    VRING_DESC_F_NEXT as u16,
                    i + 1,
                ))
            } else {
                RawDescriptor::from(SplitDescriptor::new(0x1000 * i as u64, 0x1000, 0, 0))
            };
            idtable.store(i, desc).unwrap();
        }

        assert_eq!(c.head_index(), 0);
        // Consume the first (direct) descriptor.
        c.next().unwrap();

        // The chain has not reached the indirect descriptor yet.
        assert!(!c.is_indirect);

        // Iterating further switches to the indirect table and walks its 4 descriptors.
        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
        assert!(c.next().is_none());
    }

    #[test]
    fn test_indirect_descriptor_address_noaligned() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

        // An indirect descriptor whose table address (0x7001) is not aligned to
        // VRING_DESC_ALIGN_SIZE; only the table length is required to be a multiple of the
        // descriptor size, so the chain can still be iterated.
        let desc = RawDescriptor::from(SplitDescriptor::new(
            0x7001,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        ));
        dtable.store(0, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

        let idtable = DescriptorTable::new(m, GuestAddress(0x7001), 4);
        for i in 0..4u16 {
            let desc: RawDescriptor = if i < 3 {
                RawDescriptor::from(SplitDescriptor::new(
                    0x1000 * i as u64,
                    0x1000,
                    VRING_DESC_F_NEXT as u16,
                    i + 1,
                ))
            } else {
                RawDescriptor::from(SplitDescriptor::new(0x1000 * i as u64, 0x1000, 0, 0))
            };
            idtable.store(i, desc).unwrap();
        }

        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
    }

    #[test]
    fn test_indirect_descriptor_err() {
        {
            // Invalid indirect descriptor: the table length is not a multiple of the
            // descriptor size.
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1001,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
            // Invalid indirect descriptor: the table would hold more than `u16::MAX` entries.
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                (u16::MAX as u32 + 1) * VRING_DESC_ALIGN_SIZE,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            // A valid indirect descriptor pointing to a table at 0x1000...
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x1000,
                0x1000,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            vq.desc_table().store(0, desc).unwrap();
            // ...whose first entry is a plain descriptor: iteration succeeds.
            let desc = RawDescriptor::from(SplitDescriptor::new(0x3000, 0x1000, 0, 0));
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);
            assert!(c.next().is_some());

            // Replacing that entry with another indirect descriptor makes the chain invalid:
            // nested indirect tables are not allowed.
            let desc = RawDescriptor::from(SplitDescriptor::new(
                0x3000,
                0x1000,
                VRING_DESC_F_INDIRECT as u16,
                0,
            ));
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }
    }
}