use std::fmt::{self, Debug};
use std::mem::size_of;
use std::ops::Deref;

use vm_memory::bitmap::{BitmapSlice, WithBitmapSlice};
use vm_memory::{Address, Bytes, GuestAddress, GuestMemory, GuestMemoryRegion};

use crate::{Descriptor, Error, Reader, Writer};
use virtio_bindings::bindings::virtio_ring::VRING_DESC_ALIGN_SIZE;

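/// A virtio descriptor chain.
///
/// The chain is walked by reading [`Descriptor`] objects from guest memory,
/// starting at `head_index` in the descriptor table located at `desc_table`.
///
/// A minimal sketch of building and walking a chain (illustrative only: it uses
/// the crate's mock queue helpers, and `DescriptorChain::new` is `pub(crate)`,
/// so user code normally receives already-built chains from the queue instead):
///
/// ```ignore
/// let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
/// let vq = MockSplitQueue::new(m, 16);
///
/// // A two-descriptor chain: entry 0 links to entry 1.
/// vq.desc_table()
///     .store(0, Descriptor::new(0x1000, 0x100, VRING_DESC_F_NEXT as u16, 1))
///     .unwrap();
/// vq.desc_table().store(1, Descriptor::new(0x2000, 0x100, 0, 0)).unwrap();
///
/// let chain = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
/// for desc in chain {
///     println!("buffer at {:#x}, len {}", desc.addr().0, desc.len());
/// }
/// ```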
#[derive(Clone, Debug)]
pub struct DescriptorChain<M> {
    mem: M,
    desc_table: GuestAddress,
    queue_size: u16,
    head_index: u16,
    next_index: u16,
    // Number of descriptors the chain may still yield; guards against loops.
    ttl: u16,
    // Total length in bytes of the descriptors yielded so far.
    yielded_bytes: u32,
    is_indirect: bool,
}

impl<M> DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    // Build a chain starting at `head_index` that may yield at most `ttl` more
    // descriptors.
    fn with_ttl(
        mem: M,
        desc_table: GuestAddress,
        queue_size: u16,
        ttl: u16,
        head_index: u16,
    ) -> Self {
        DescriptorChain {
            mem,
            desc_table,
            queue_size,
            head_index,
            next_index: head_index,
            ttl,
            is_indirect: false,
            yielded_bytes: 0,
        }
    }

    /// Create a new `DescriptorChain` that starts at `head_index` in the
    /// descriptor table located at `desc_table`. The initial `ttl` equals
    /// `queue_size`, since a valid chain cannot be longer than the table.
    pub(crate) fn new(mem: M, desc_table: GuestAddress, queue_size: u16, head_index: u16) -> Self {
        Self::with_ttl(mem, desc_table, queue_size, queue_size, head_index)
    }

    /// Return the index of the chain's head descriptor in the descriptor table.
    pub fn head_index(&self) -> u16 {
        self.head_index
    }

    /// Return a reference to the guest memory object the chain refers to.
    pub fn memory(&self) -> &M::Target {
        self.mem.deref()
    }

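    /// Return an iterator that only yields the readable descriptors in the chain.
    ///
    /// A minimal sketch (illustrative; `VRING_DESC_F_WRITE` marks a descriptor
    /// as device-writable, everything else is device-readable):
    ///
    /// ```ignore
    /// // Descriptor 0 is device-readable, descriptor 1 is device-writable.
    /// vq.desc_table()
    ///     .store(0, Descriptor::new(0x1000, 0x100, VRING_DESC_F_NEXT as u16, 1))
    ///     .unwrap();
    /// vq.desc_table()
    ///     .store(1, Descriptor::new(0x2000, 0x100, VRING_DESC_F_WRITE as u16, 0))
    ///     .unwrap();
    ///
    /// let chain = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
    /// assert_eq!(chain.clone().readable().count(), 1);
    /// assert_eq!(chain.writable().count(), 1);
    /// ```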
    pub fn readable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: false,
        }
    }

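    /// Consume the chain and build a [`Writer`] backed by it and the guest
    /// memory `mem`.
    ///
    /// Any failure while constructing the writer is reported as
    /// `Error::InvalidChain`. A minimal usage sketch, assuming the caller
    /// propagates [`Error`]:
    ///
    /// ```ignore
    /// let writer = chain.writer(mem)?;
    /// ```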
    pub fn writer<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Writer<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Writer::new(mem, self).map_err(|_| Error::InvalidChain)
    }

    /// Consume the chain and build a [`Reader`] backed by it and the guest
    /// memory `mem`. Any failure while constructing the reader is reported as
    /// `Error::InvalidChain`.
    pub fn reader<'a, B: BitmapSlice>(self, mem: &'a M::Target) -> Result<Reader<'a, B>, Error>
    where
        M::Target: Sized,
        <<M::Target as GuestMemory>::R as GuestMemoryRegion>::B: WithBitmapSlice<'a, S = B>,
    {
        Reader::new(mem, self).map_err(|_| Error::InvalidChain)
    }

    /// Return an iterator that only yields the writable descriptors in the chain.
    pub fn writable(self) -> DescriptorChainRwIter<M> {
        DescriptorChainRwIter {
            chain: self,
            writable: true,
        }
    }

    // Alter the internal state of the chain to switch iteration over to the
    // indirect descriptor table referenced by `desc`.
    fn switch_to_indirect_table(&mut self, desc: Descriptor) -> Result<(), Error> {
        // Only one level of indirection is supported: an indirect descriptor must
        // not be reached from within an indirect table.
        if self.is_indirect {
            return Err(Error::InvalidIndirectDescriptor);
        }

        // The length of the indirect table must be a multiple of
        // `VRING_DESC_ALIGN_SIZE` (the size of a descriptor).
        if desc.len() & (VRING_DESC_ALIGN_SIZE - 1) != 0 {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        // Descriptor indices are 16-bit values, so the table cannot hold more
        // than `u16::MAX` entries.
        let table_len = desc.len() / VRING_DESC_ALIGN_SIZE;
        if table_len > u32::from(u16::MAX) {
            return Err(Error::InvalidIndirectDescriptorTable);
        }

        // Restart iteration at index 0 of the indirect table.
        self.desc_table = desc.addr();
        // The conversion cannot fail: `table_len` was just checked against `u16::MAX`.
        self.queue_size = u16::try_from(table_len).expect("invalid table_len");
        self.next_index = 0;
        self.ttl = self.queue_size;
        self.is_indirect = true;

        Ok(())
    }
}

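/// Iterating a `DescriptorChain` reads descriptors one at a time from guest
/// memory. Descriptors flagged with `VRING_DESC_F_INDIRECT` are not yielded
/// themselves; the chain transparently switches to the table they point to.
///
/// A sketch mirroring the indirect-descriptor tests below (illustrative only;
/// it relies on the crate's mock queue helpers):
///
/// ```ignore
/// // Entry 0 of the main table refers to an indirect table with two entries.
/// vq.desc_table()
///     .store(0, Descriptor::new(0x7000, 2 * 16, VRING_DESC_F_INDIRECT as u16, 0))
///     .unwrap();
///
/// // Populate the indirect table at 0x7000 with a two-descriptor chain.
/// let idtable = DescriptorTable::new(m, GuestAddress(0x7000), 2);
/// idtable
///     .store(0, Descriptor::new(0x1000, 0x100, VRING_DESC_F_NEXT as u16, 1))
///     .unwrap();
/// idtable.store(1, Descriptor::new(0x2000, 0x100, 0, 0)).unwrap();
///
/// // The chain yields the two descriptors from the indirect table.
/// let chain = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
/// assert_eq!(chain.count(), 2);
/// ```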
impl<M> Iterator for DescriptorChain<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

    fn next(&mut self) -> Option<Self::Item> {
        // Stop when the chain is exhausted or the next index runs past the table.
        if self.ttl == 0 || self.next_index >= self.queue_size {
            return None;
        }

        let desc_addr = self
            .desc_table
            .checked_add(self.next_index as u64 * size_of::<Descriptor>() as u64)?;

        // A failed read (e.g. an address outside guest memory) ends the chain.
        let desc = self.mem.read_obj::<Descriptor>(desc_addr).ok()?;

        if desc.refers_to_indirect_table() {
            // Don't yield the indirect descriptor itself; switch to the table it
            // references and yield its first entry instead.
            self.switch_to_indirect_table(desc).ok()?;
            return self.next();
        }

        // Track the total length of the chain; if it would overflow a u32 (i.e.
        // exceed 2^32 bytes), treat the chain as invalid and stop iterating.
        match self.yielded_bytes.checked_add(desc.len()) {
            Some(yielded_bytes) => self.yielded_bytes = yielded_bytes,
            None => return None,
        };

        if desc.has_next() {
            self.next_index = desc.next();
            // Decrement `ttl` so a cycle in the chain cannot make us loop forever.
            self.ttl -= 1;
        } else {
            self.ttl = 0;
        }

        Some(desc)
    }
}

/// An iterator for readable or writable descriptors.
#[derive(Clone)]
pub struct DescriptorChainRwIter<M> {
    chain: DescriptorChain<M>,
    writable: bool,
}

impl<M> Iterator for DescriptorChainRwIter<M>
where
    M: Deref,
    M::Target: GuestMemory,
{
    type Item = Descriptor;

    fn next(&mut self) -> Option<Self::Item> {
        // Walk the underlying chain, skipping descriptors whose direction doesn't
        // match the one requested via `writable`.
        loop {
            match self.chain.next() {
                Some(v) => {
                    if v.is_write_only() == self.writable {
                        return Some(v);
                    }
                }
                None => return None,
            }
        }
    }
}

impl<M> Debug for DescriptorChainRwIter<M>
where
    M: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DescriptorChainRwIter")
            .field("chain", &self.chain)
            .field("writable", &self.writable)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mock::{DescriptorTable, MockSplitQueue};
    use virtio_bindings::bindings::virtio_ring::{VRING_DESC_F_INDIRECT, VRING_DESC_F_NEXT};
    use vm_memory::GuestMemoryMmap;

    #[test]
    fn test_checked_new_descriptor_chain() {
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);

        // The mock queue layout fits below 0x1000, so the descriptor buffers used
        // below don't overlap it.
        assert!(vq.end().0 < 0x1000);

        // The head index is out of range (index == queue_size), so the chain is empty.
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 16)
                .next()
                .is_none()
        );

        // The descriptor table address is outside guest memory, so reading the head
        // descriptor fails and the chain is empty.
        assert!(
            DescriptorChain::<&GuestMemoryMmap>::new(m, GuestAddress(0x00ff_ffff_ffff), 16, 0)
                .next()
                .is_none()
        );

        {
            // The first descriptor is valid, but its `next` index (16) points past
            // the end of the table, so the chain ends after one descriptor.
            let desc = Descriptor::new(0x1000, 0x1000, VRING_DESC_F_NEXT as u16, 16);
            vq.desc_table().store(0, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);
            c.next().unwrap();
            assert!(c.next().is_none());
        }

        {
            // A valid two-descriptor chain: entry 0 links to entry 1.
            let desc = Descriptor::new(0x1000, 0x1000, VRING_DESC_F_NEXT as u16, 1);
            vq.desc_table().store(0, desc).unwrap();

            let desc = Descriptor::new(0x2000, 0x1000, 0, 0);
            vq.desc_table().store(1, desc).unwrap();

            let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), 16, 0);

            assert_eq!(
                c.memory() as *const GuestMemoryMmap,
                m as *const GuestMemoryMmap
            );

            assert_eq!(c.desc_table, vq.start());
            assert_eq!(c.queue_size, 16);
            assert_eq!(c.ttl, c.queue_size);

            let desc = c.next().unwrap();
            assert_eq!(desc.addr(), GuestAddress(0x1000));
            assert_eq!(desc.len(), 0x1000);
            assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
            assert_eq!(desc.next(), 1);
            assert_eq!(c.ttl, c.queue_size - 1);

            // The second (last) descriptor exhausts the chain: `ttl` drops to 0 and
            // further calls keep returning `None`.
            assert!(c.next().is_some());
            assert_eq!(c.ttl, 0);
            assert!(c.next().is_none());
            assert_eq!(c.ttl, 0);
        }
    }

    #[test]
    fn test_ttl_wrap_around() {
        const QUEUE_SIZE: u16 = 16;

        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x100000)]).unwrap();
        let vq = MockSplitQueue::new(m, QUEUE_SIZE);

        // Fill the whole table: every descriptor except the last one links to the
        // next entry.
        for i in 0..QUEUE_SIZE - 1 {
            let desc = Descriptor::new(
                0x1000 * (i + 1) as u64,
                0x1000,
                VRING_DESC_F_NEXT as u16,
                i + 1,
            );
            vq.desc_table().store(i, desc).unwrap();
        }
        let desc = Descriptor::new((0x1000 * 16) as u64, 0x1000, 0, 0);
        vq.desc_table().store(QUEUE_SIZE - 1, desc).unwrap();

        let mut c = DescriptorChain::<&GuestMemoryMmap>::new(m, vq.start(), QUEUE_SIZE, 0);
        assert_eq!(c.ttl, c.queue_size);

        // `ttl` decreases by one for every descriptor yielded, so the chain can
        // never be longer than the table.
        for i in 0..QUEUE_SIZE {
            let _desc = c.next().unwrap();
            assert_eq!(c.ttl, c.queue_size - i - 1);
        }
        assert!(c.next().is_none());
    }

    #[test]
    fn test_new_from_indirect_descriptor() {
        // Walk a chain whose second descriptor points at an indirect table; the
        // iterator should switch to that table transparently.
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

        // Main table: a regular descriptor followed by an indirect one.
        let desc = Descriptor::new(0x6000, 0x1000, VRING_DESC_F_NEXT as u16, 1);
        dtable.store(0, desc).unwrap();
        let desc = Descriptor::new(
            0x7000,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        );
        dtable.store(1, desc).unwrap();
        let desc = Descriptor::new(0x8000, 0x1000, 0, 0);
        dtable.store(2, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

        // The indirect table at 0x7000 holds a chain of four descriptors.
        let idtable = DescriptorTable::new(m, GuestAddress(0x7000), 4);
        for i in 0..4u16 {
            let desc: Descriptor = if i < 3 {
                Descriptor::new(0x1000 * i as u64, 0x1000, VRING_DESC_F_NEXT as u16, i + 1)
            } else {
                Descriptor::new(0x1000 * i as u64, 0x1000, 0, 0)
            };
            idtable.store(i, desc).unwrap();
        }

        assert_eq!(c.head_index(), 0);
        // The first descriptor is a regular one, so the chain is not indirect yet.
        c.next().unwrap();
        assert!(!c.is_indirect);

        // The next descriptor switches the chain over to the indirect table, and
        // the four entries of that table are yielded in order.
        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
        // Once the indirect chain is exhausted, iteration stops; the `next` field
        // of the indirect descriptor itself is never followed.
        assert!(c.next().is_none());
    }

    #[test]
    fn test_indirect_descriptor_address_noaligned() {
        // Only the length of an indirect table is validated; its address is not
        // required to be aligned, so a table at 0x7001 can still be iterated.
        let m = &GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
        let vq = MockSplitQueue::new(m, 16);
        let dtable = vq.desc_table();

        // The head descriptor points at an indirect table at a non-aligned address.
        let desc = Descriptor::new(
            0x7001,
            0x1000,
            (VRING_DESC_F_INDIRECT | VRING_DESC_F_NEXT) as u16,
            2,
        );
        dtable.store(0, desc).unwrap();

        let mut c: DescriptorChain<&GuestMemoryMmap> = DescriptorChain::new(m, vq.start(), 16, 0);

        // Populate the indirect table with a chain of four descriptors.
        let idtable = DescriptorTable::new(m, GuestAddress(0x7001), 4);
        for i in 0..4u16 {
            let desc: Descriptor = if i < 3 {
                Descriptor::new(0x1000 * i as u64, 0x1000, VRING_DESC_F_NEXT as u16, i + 1)
            } else {
                Descriptor::new(0x1000 * i as u64, 0x1000, 0, 0)
            };
            idtable.store(i, desc).unwrap();
        }

        // The chain switches to the indirect table and yields all four entries.
        for i in 0..4 {
            let desc = c.next().unwrap();
            assert!(c.is_indirect);
            if i < 3 {
                assert_eq!(desc.flags(), VRING_DESC_F_NEXT as u16);
                assert_eq!(desc.next(), i + 1);
            }
        }
    }

    #[test]
    fn test_indirect_descriptor_err() {
        {
            // The indirect table length is not a multiple of the descriptor size,
            // so switching to it fails and the chain yields nothing.
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = Descriptor::new(0x1000, 0x1001, VRING_DESC_F_INDIRECT as u16, 0);
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
            // The indirect table would hold more than `u16::MAX` descriptors, which
            // is invalid.
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            let desc = Descriptor::new(
                0x1000,
                (u16::MAX as u32 + 1) * VRING_DESC_ALIGN_SIZE,
                VRING_DESC_F_INDIRECT as u16,
                0,
            );
            vq.desc_table().store(0, desc).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }

        {
            let m = &GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap();
            let vq = MockSplitQueue::new(m, 16);

            // The head descriptor points at an indirect table at 0x1000 holding a
            // single regular descriptor, so the chain yields one entry.
            let desc = Descriptor::new(0x1000, 0x1000, VRING_DESC_F_INDIRECT as u16, 0);
            vq.desc_table().store(0, desc).unwrap();
            let desc = Descriptor::new(0x3000, 0x1000, 0, 0);
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);
            assert!(c.next().is_some());

            // If the indirect table itself contains an indirect descriptor, the
            // chain is invalid: only one level of indirection is allowed.
            let desc = Descriptor::new(0x3000, 0x1000, VRING_DESC_F_INDIRECT as u16, 0);
            m.write_obj(desc, GuestAddress(0x1000)).unwrap();

            let mut c: DescriptorChain<&GuestMemoryMmap> =
                DescriptorChain::new(m, vq.start(), 16, 0);

            assert!(c.next().is_none());
        }
    }
}