#![allow(missing_docs)]

use std::borrow::Cow;
use std::fmt;

use bitflags::bitflags;
use perf_event_open_sys::bindings;
use perf_event_open_sys::bindings::__BindgenBitfieldUnit;
use perf_event_open_sys::bindings::perf_branch_entry;
use perf_event_open_sys::bindings::perf_mem_data_src;

use crate::parse::ParseError;
use crate::prelude::*;
use crate::ReadGroup;
use crate::ReadValue;

mod sample_impl {
    use super::*;

    option_struct! {
        pub(super) struct Sample<'a>: u32 {
            pub ip: u64,
            pub pid: u32,
            pub tid: u32,
            pub time: u64,
            #[debug(with = crate::util::fmt::HexAddr)]
            pub addr: u64,
            pub id: u64,
            pub stream_id: u64,
            pub cpu: u32,
            pub period: u64,
            pub values: ReadGroup<'a>,
            pub callchain: Cow<'a, [u64]>,
            pub raw: Cow<'a, [u8]>,
            pub lbr_hw_index: u64,
            pub lbr: Cow<'a, [BranchEntry]>,
            pub regs_user: Registers<'a>,
            pub stack_user: Cow<'a, [u8]>,
            pub weight: u64,
            pub data_src: DataSource,
            pub transaction: Txn,
            pub regs_intr: Registers<'a>,
            #[debug(with = crate::util::fmt::HexAddr)]
            pub phys_addr: u64,
            pub aux: Cow<'a, [u8]>,
            pub cgroup: u64,
            pub data_page_size: u64,
            pub code_page_size: u64
        }
    }
}

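/// A parsed `PERF_RECORD_SAMPLE` record.
///
/// Every field is optional: which ones are present depends on the
/// `sample_type` flags the event was configured with.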
#[derive(Clone)]
pub struct Sample<'a>(sample_impl::Sample<'a>);

#[allow(missing_docs)]
impl<'a> Sample<'a> {
    pub fn id(&self) -> Option<u64> {
        self.0.id().copied()
    }

    pub fn ip(&self) -> Option<u64> {
        self.0.ip().copied()
    }

    pub fn pid(&self) -> Option<u32> {
        self.0.pid().copied()
    }

    pub fn tid(&self) -> Option<u32> {
        self.0.tid().copied()
    }

    pub fn time(&self) -> Option<u64> {
        self.0.time().copied()
    }

    pub fn addr(&self) -> Option<u64> {
        self.0.addr().copied()
    }

    pub fn stream_id(&self) -> Option<u64> {
        self.0.stream_id().copied()
    }

    pub fn cpu(&self) -> Option<u32> {
        self.0.cpu().copied()
    }

    pub fn period(&self) -> Option<u64> {
        self.0.period().copied()
    }

    pub fn values(&self) -> Option<&ReadGroup<'a>> {
        self.0.values()
    }

    pub fn callchain(&self) -> Option<&[u64]> {
        self.0.callchain().map(|cow| &**cow)
    }

    pub fn raw(&self) -> Option<&[u8]> {
        self.0.raw().map(|cow| &**cow)
    }

    pub fn lbr_hw_index(&self) -> Option<u64> {
        self.0.lbr_hw_index().copied()
    }

    pub fn lbr(&self) -> Option<&[BranchEntry]> {
        self.0.lbr().map(|cow| &**cow)
    }

    pub fn regs_user(&self) -> Option<&Registers<'a>> {
        self.0.regs_user()
    }

    pub fn stack_user(&self) -> Option<&[u8]> {
        self.0.stack_user().map(|cow| &**cow)
    }

    pub fn weight(&self) -> Option<u64> {
        self.0.weight().copied()
    }

    pub fn data_src(&self) -> Option<DataSource> {
        self.0.data_src().copied()
    }

    pub fn transaction(&self) -> Option<Txn> {
        self.0.transaction().copied()
    }

    pub fn regs_intr(&self) -> Option<&Registers<'a>> {
        self.0.regs_intr()
    }

    pub fn phys_addr(&self) -> Option<u64> {
        self.0.phys_addr().copied()
    }

    pub fn aux(&self) -> Option<&[u8]> {
        self.0.aux().map(|cow| &**cow)
    }

    pub fn cgroup(&self) -> Option<u64> {
        self.0.cgroup().copied()
    }

    pub fn data_page_size(&self) -> Option<u64> {
        self.0.data_page_size().copied()
    }

    pub fn code_page_size(&self) -> Option<u64> {
        self.0.code_page_size().copied()
    }
}

impl<'p> Parse<'p> for Sample<'p> {
    fn parse<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        let config = p.config();
        let sty = config.sample_type();
        let branch_hw_index = config.branch_hw_index();

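        // PERF_SAMPLE_IDENTIFIER places the sample id at the very start of the
        // record; PERF_SAMPLE_ID stores it again later on, so the two are merged.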
        let id = p.parse_if(sty.contains(SampleFlags::IDENTIFIER))?;
        let ip = p.parse_if(sty.contains(SampleFlags::IP))?;
        let pid = p.parse_if(sty.contains(SampleFlags::TID))?;
        let tid = p.parse_if(sty.contains(SampleFlags::TID))?;
        let time = p.parse_if(sty.contains(SampleFlags::TIME))?;
        let addr = p.parse_if(sty.contains(SampleFlags::ADDR))?;
        let id = p.parse_if(sty.contains(SampleFlags::ID))?.or(id);
        let stream_id = p.parse_if(sty.contains(SampleFlags::STREAM_ID))?;
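        // The CPU is recorded as `{ u32 cpu, res; }`: parse both words and keep cpu.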
        let cpu = p.parse_if_with(sty.contains(SampleFlags::CPU), |p| {
            Ok((p.parse_u32()?, p.parse_u32()?).0)
        })?;
        let period = p.parse_if(sty.contains(SampleFlags::PERIOD))?;
        let values = p.parse_if_with(sty.contains(SampleFlags::READ), |p| {
            if p.config().read_format().contains(ReadFormat::GROUP) {
                p.parse()
            } else {
                ReadValue::parse(p).map(From::from)
            }
        })?;
        let callchain = p.parse_if_with(sty.contains(SampleFlags::CALLCHAIN), |p| {
            let nr = p.parse_u64()? as _;
            unsafe { p.parse_slice(nr) }
        })?;
        let raw = p.parse_if_with(sty.contains(SampleFlags::RAW), |p| {
            p.parse_padded(std::mem::size_of::<u64>(), |p| {
                let size = p.parse_u32()? as _;
                p.parse_bytes(size)
            })
        })?;
        let lbr = p.parse_if_with(sty.contains(SampleFlags::BRANCH_STACK), |p| {
            let nr = p.parse_u64()? as usize;
            let hw_index = p.parse_if(branch_hw_index)?;
            let lbr = unsafe { p.parse_slice(nr)? };

            Ok((lbr, hw_index))
        })?;
        let (lbr, lbr_hw_index) = match lbr {
            Some((lbr, hw_index)) => (Some(lbr), hw_index),
            _ => (None, None),
        };
        let regs_user = p.parse_if_with(sty.contains(SampleFlags::REGS_USER), |p| {
            Registers::parse_user(p)
        })?;
        let stack_user = p.parse_if_with(sty.contains(SampleFlags::STACK_USER), |p| {
            let size = p.parse_u64()? as usize;

            let mut data = p.parse_bytes(size)?;

            let dyn_size = match size {
                0 => 0,
                _ => p.parse_u64()? as usize,
            };

            if dyn_size > data.len() {
                return Err(ParseError::custom(
                    ErrorKind::InvalidRecord,
                    "stack dyn_size was greater than the record size",
                ));
            }

            match &mut data {
                Cow::Owned(data) => data.truncate(dyn_size),
                Cow::Borrowed(data) => *data = &data[..dyn_size],
            }

            Ok(data)
        })?;
        let weight = p.parse_if(sty.contains(SampleFlags::WEIGHT))?;
        let data_src = p.parse_if(sty.contains(SampleFlags::DATA_SRC))?;
        let transaction = p.parse_if(sty.contains(SampleFlags::TRANSACTION))?;
        let regs_intr = p.parse_if_with(sty.contains(SampleFlags::REGS_INTR), |p| {
            Registers::parse_intr(p)
        })?;
        let phys_addr = p.parse_if(sty.contains(SampleFlags::PHYS_ADDR))?;
        let cgroup = p.parse_if(sty.contains(SampleFlags::CGROUP))?;
        let data_page_size = p.parse_if(sty.contains(SampleFlags::DATA_PAGE_SIZE))?;
        let code_page_size = p.parse_if(sty.contains(SampleFlags::CODE_PAGE_SIZE))?;
        let aux = p.parse_if_with(sty.contains(SampleFlags::AUX), |p| {
            let size = p.parse_u64()? as usize;
            p.parse_bytes(size)
        })?;

        Ok(Self(sample_impl::Sample::new(
            ip,
            pid,
            tid,
            time,
            addr,
            id,
            stream_id,
            cpu,
            period,
            values,
            callchain,
            raw,
            lbr_hw_index,
            lbr,
            regs_user,
            stack_user,
            weight,
            data_src,
            transaction,
            regs_intr,
            phys_addr,
            aux,
            cgroup,
            data_page_size,
            code_page_size,
        )))
    }
}

impl fmt::Debug for Sample<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

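/// A set of CPU registers captured along with a sample
/// (`PERF_SAMPLE_REGS_USER` or `PERF_SAMPLE_REGS_INTR`).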
#[derive(Clone, Debug)]
pub struct Registers<'a> {
    pub abi: SampleRegsAbi,

    pub mask: u64,

    pub regs: Cow<'a, [u64]>,
}

c_enum! {
    #[derive(Copy, Clone, Eq, PartialEq, Hash)]
    pub enum SampleRegsAbi : u64 {
        NONE = bindings::PERF_SAMPLE_REGS_ABI_NONE as _,
        ABI_32 = bindings::PERF_SAMPLE_REGS_ABI_32 as _,
        ABI_64 = bindings::PERF_SAMPLE_REGS_ABI_64 as _,
    }
}

impl SampleRegsAbi {
    pub const fn new(value: u64) -> Self {
        Self(value)
    }
}

impl<'p> Registers<'p> {
    pub fn parse_user<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Self::parse(p, p.config().regs_user())
    }

    pub fn parse_intr<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Self::parse(p, p.config().regs_intr())
    }

    fn parse<B, E>(p: &mut Parser<B, E>, mask: u64) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Ok(Self {
            abi: p.parse()?,
            mask,
            regs: unsafe { p.parse_slice(mask.count_ones() as _)? },
        })
    }
}

impl<'p> Parse<'p> for SampleRegsAbi {
    fn parse<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Ok(Self::new(p.parse()?))
    }
}

c_enum! {
    #[derive(Copy, Clone, Eq, PartialEq, Hash)]
    pub enum BranchType : u8 {
        UNKNOWN = bindings::PERF_BR_UNKNOWN as _,
        COND = bindings::PERF_BR_COND as _,
        UNCOND = bindings::PERF_BR_UNCOND as _,
        IND = bindings::PERF_BR_IND as _,
        CALL = bindings::PERF_BR_CALL as _,
        IND_CALL = bindings::PERF_BR_IND_CALL as _,
        RET = bindings::PERF_BR_RET as _,
        SYSCALL = bindings::PERF_BR_SYSCALL as _,
        COND_CALL = bindings::PERF_BR_COND_CALL as _,
        COND_RET = bindings::PERF_BR_COND_RET as _,
    }
}

impl BranchType {
    pub const fn new(value: u8) -> Self {
        Self(value)
    }
}

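/// A single entry in the branch stack (LBR) recorded with a sample.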
#[derive(Copy, Clone, Debug)]
pub struct BranchEntry(perf_branch_entry);

impl BranchEntry {
    pub fn from(&self) -> u64 {
        self.0.from
    }

    pub fn to(&self) -> u64 {
        self.0.to
    }

    pub fn mispred(&self) -> bool {
        self.0.mispred() != 0
    }

    pub fn predicted(&self) -> bool {
        self.0.predicted() != 0
    }

    pub fn in_tx(&self) -> bool {
        self.0.in_tx() != 0
    }

    pub fn abort(&self) -> bool {
        self.0.abort() != 0
    }

    pub fn cycles(&self) -> u16 {
        self.0.cycles() as _
    }

    pub fn ty(&self) -> BranchType {
        BranchType(self.0.type_() as _)
    }
}

impl<'p> Parse<'p> for BranchEntry {
    fn parse<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Ok(Self(perf_branch_entry {
            from: p.parse()?,
            to: p.parse()?,
            _bitfield_align_1: [],
            _bitfield_1: __BindgenBitfieldUnit::new(u64::to_ne_bytes(p.parse()?)),
        }))
    }
}

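/// Describes the memory hierarchy level, TLB, snoop, and lock state of a
/// sampled memory access (`PERF_SAMPLE_DATA_SRC`).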
#[derive(Copy, Clone, Default)]
pub struct DataSource(perf_mem_data_src);

impl DataSource {
    fn bitfield(&self) -> &bindings::perf_mem_data_src__bindgen_ty_1 {
        unsafe { &self.0.__bindgen_anon_1 }
    }

    pub fn mem_op(&self) -> MemOp {
        MemOp::from_bits_retain(self.bitfield().mem_op())
    }

    pub fn mem_lvl(&self) -> MemLevel {
        MemLevel::from_bits_retain(self.bitfield().mem_lvl())
    }

    pub fn mem_snoop(&self) -> MemSnoop {
        MemSnoop::new(self.bitfield().mem_snoop(), self.bitfield().mem_snoopx())
    }

    pub fn mem_lock(&self) -> MemLock {
        MemLock::from_bits_retain(self.bitfield().mem_lock())
    }

    pub fn mem_dtlb(&self) -> MemDtlb {
        MemDtlb::from_bits_retain(self.bitfield().mem_dtlb())
    }

    pub fn mem_lvl_num(&self) -> MemLevelNum {
        MemLevelNum(self.bitfield().mem_lvl_num() as _)
    }

    pub fn mem_remote(&self) -> bool {
        self.bitfield().mem_remote() != 0
    }

    pub fn mem_blk(&self) -> MemBlk {
        MemBlk::from_bits_retain(self.bitfield().mem_blk())
    }

    pub fn mem_hops(&self) -> u8 {
        self.bitfield().mem_hops() as _
    }
}

impl fmt::Debug for DataSource {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DataSource")
            .field("mem_op", &self.mem_op())
            .field("mem_lvl", &self.mem_lvl())
            .field("mem_snoop", &self.mem_snoop())
            .field("mem_lock", &self.mem_lock())
            .field("mem_dtlb", &self.mem_dtlb())
            .field("mem_lvl_num", &self.mem_lvl_num())
            .field("mem_remote", &self.mem_remote())
            .field("mem_blk", &self.mem_blk())
            .field("mem_hops", &self.mem_hops())
            .finish()
    }
}

impl<'p> Parse<'p> for DataSource {
    fn parse<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Ok(Self(perf_mem_data_src { val: p.parse()? }))
    }
}

bitflags! {
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemOp : u64 {
        const NA = bindings::PERF_MEM_OP_NA as _;
        const LOAD = bindings::PERF_MEM_OP_LOAD as _;
        const STORE = bindings::PERF_MEM_OP_STORE as _;
        const PFETCH = bindings::PERF_MEM_OP_PFETCH as _;
        const EXEC = bindings::PERF_MEM_OP_EXEC as _;
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemLevel : u64 {
        const NA = bindings::PERF_MEM_LVL_NA as _;
        const HIT = bindings::PERF_MEM_LVL_HIT as _;
        const MISS = bindings::PERF_MEM_LVL_MISS as _;
        const L1 = bindings::PERF_MEM_LVL_L1 as _;
        const LFB = bindings::PERF_MEM_LVL_LFB as _;
        const L2 = bindings::PERF_MEM_LVL_L2 as _;
        const L3 = bindings::PERF_MEM_LVL_L3 as _;
        const LOC_RAM = bindings::PERF_MEM_LVL_LOC_RAM as _;
        const REM_RAM1 = bindings::PERF_MEM_LVL_REM_RAM1 as _;
        const REM_RAM2 = bindings::PERF_MEM_LVL_REM_RAM2 as _;
        const REM_CCE1 = bindings::PERF_MEM_LVL_REM_CCE1 as _;
        const REM_CCE2 = bindings::PERF_MEM_LVL_REM_CCE2 as _;
        const IO = bindings::PERF_MEM_LVL_IO as _;
        const UNC = bindings::PERF_MEM_LVL_UNC as _;
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemLock : u64 {
        const NA = bindings::PERF_MEM_LOCK_NA as _;
        const LOCKED = bindings::PERF_MEM_LOCK_LOCKED as _;
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemDtlb : u64 {
        const NA = bindings::PERF_MEM_TLB_NA as _;
        const HIT = bindings::PERF_MEM_TLB_HIT as _;
        const MISS = bindings::PERF_MEM_TLB_MISS as _;
        const L1 = bindings::PERF_MEM_TLB_L1 as _;
        const L2 = bindings::PERF_MEM_TLB_L2 as _;
        const WK = bindings::PERF_MEM_TLB_WK as _;
        const OS = bindings::PERF_MEM_TLB_OS as _;
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemSnoopX : u64 {
        const FWD = bindings::PERF_MEM_SNOOPX_FWD as _;
        const PEER = bindings::PERF_MEM_SNOOPX_PEER as _;
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemBlk : u64 {
        const NA = bindings::PERF_MEM_BLK_NA as _;
        const DATA = bindings::PERF_MEM_BLK_DATA as _;
        const ADDR = bindings::PERF_MEM_BLK_ADDR as _;
    }
}

bitflags! {
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct MemSnoop : u64 {
        const NA = (bindings::PERF_MEM_SNOOP_NA as u64) << bindings::PERF_MEM_SNOOP_SHIFT;
        const NONE = (bindings::PERF_MEM_SNOOP_NONE as u64) << bindings::PERF_MEM_SNOOP_SHIFT;
        const HIT = (bindings::PERF_MEM_SNOOP_HIT as u64) << bindings::PERF_MEM_SNOOP_SHIFT;
        const MISS = (bindings::PERF_MEM_SNOOP_MISS as u64) << bindings::PERF_MEM_SNOOP_SHIFT;
        const HITM = (bindings::PERF_MEM_SNOOP_HITM as u64) << bindings::PERF_MEM_SNOOP_SHIFT;

        const FWD = (bindings::PERF_MEM_SNOOPX_FWD as u64) << bindings::PERF_MEM_SNOOPX_SHIFT;
        const PEER = (bindings::PERF_MEM_SNOOPX_PEER as u64) << bindings::PERF_MEM_SNOOPX_SHIFT;
    }
}

impl MemSnoop {
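    /// Combine the raw `mem_snoop` and `mem_snoopx` bitfield values into a
    /// single set of flags.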
    pub fn new(mut mem_snoop: u64, mut mem_snoopx: u64) -> Self {
        mem_snoop &= Self::SNOOP_MASK;
        mem_snoopx &= Self::SNOOPX_MASK;

        Self::from_bits_truncate(
            (mem_snoop << bindings::PERF_MEM_SNOOP_SHIFT)
                | (mem_snoopx << bindings::PERF_MEM_SNOOPX_SHIFT),
        )
    }

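    // Bit widths of the snoop and snoopx bitfields within perf_mem_data_src.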
    const SNOOP_MASK: u64 = 0b11111;
    const SNOOPX_MASK: u64 = 0b11;
}

bitflags! {
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
    pub struct Txn: u64 {
        const ELISION = bindings::PERF_TXN_ELISION as _;
        const TRANSACTION = bindings::PERF_TXN_TRANSACTION as _;
        const SYNC = bindings::PERF_TXN_SYNC as _;
        const ASYNC = bindings::PERF_TXN_ASYNC as _;
        const RETRY = bindings::PERF_TXN_RETRY as _;
        const CONFLICT = bindings::PERF_TXN_CONFLICT as _;
        const CAPACITY_WRITE = bindings::PERF_TXN_CAPACITY_WRITE as _;
        const CAPACITY_READ = bindings::PERF_TXN_CAPACITY_READ as _;

        const ABORT_MASK = bindings::PERF_TXN_ABORT_MASK as _;
    }
}

impl Txn {
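    /// The user-supplied abort code of an aborted transaction: the
    /// `PERF_TXN_ABORT_MASK` bits shifted down by `PERF_TXN_ABORT_SHIFT`.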
    pub fn abort(&self) -> u32 {
        (self.bits() >> bindings::PERF_TXN_ABORT_SHIFT) as _
    }
}

impl<'p> Parse<'p> for Txn {
    fn parse<B, E>(p: &mut Parser<B, E>) -> ParseResult<Self>
    where
        E: Endian,
        B: ParseBuf<'p>,
    {
        Ok(Self::from_bits_retain(p.parse()?))
    }
}

c_enum! {
    #[derive(Copy, Clone, Eq, PartialEq, Hash)]
    pub enum MemLevelNum : u8 {
        L1 = bindings::PERF_MEM_LVLNUM_L1 as _,
        L2 = bindings::PERF_MEM_LVLNUM_L2 as _,
        L3 = bindings::PERF_MEM_LVLNUM_L3 as _,
        L4 = bindings::PERF_MEM_LVLNUM_L4 as _,

        ANY_CACHE = bindings::PERF_MEM_LVLNUM_ANY_CACHE as _,
        LFB = bindings::PERF_MEM_LVLNUM_LFB as _,
        RAM = bindings::PERF_MEM_LVLNUM_RAM as _,
        PMEM = bindings::PERF_MEM_LVLNUM_PMEM as _,
        NA = bindings::PERF_MEM_LVLNUM_NA as _,
    }
}

impl MemLevelNum {
    pub const fn new(value: u8) -> Self {
        Self(value)
    }
}

#[cfg(test)]
mod tests {
    use crate::endian::Little;

    use super::*;

    #[test]
    fn simple_parse_sample() {
        #[rustfmt::skip]
        let data: &[u8] = &[
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
            0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F
        ];

        let config: ParseConfig<Little> =
            ParseConfig::default().with_sample_type(SampleFlags::ADDR | SampleFlags::ID);
        let sample: Sample = Parser::new(data, config).parse().unwrap();

        assert_eq!(sample.addr(), Some(0x0706050403020100));
        assert_eq!(sample.id(), Some(0x0F0E0D0C0B0A0908));
        assert_eq!(sample.cpu(), None);
        assert_eq!(sample.time(), None);
    }

    #[test]
    fn parse_sample_with_cgroup() {
        #[rustfmt::skip]
        let data: &[u8] = &[
            0xd4, 0x08, 0x00, 0x00, 0xd4, 0x08, 0x00, 0x00,
            0xc9, 0x77, 0x8e, 0xa1, 0x3a, 0xa4, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0xd0, 0xbe, 0xc0, 0x28, 0x00, 0x00, 0x00, 0x00,
            0x24, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0xbd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0xac, 0x79, 0xc0, 0x28, 0x00, 0x00, 0x00, 0x00,
            0xbe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        ];

        let config: ParseConfig<Little> = ParseConfig::default()
            .with_sample_type(
                SampleFlags::TID
                    | SampleFlags::CGROUP
                    | SampleFlags::READ
                    | SampleFlags::TIME
                    | SampleFlags::CPU,
            )
            .with_read_format(ReadFormat::GROUP | ReadFormat::TOTAL_TIME_ENABLED);
        let sample: Sample = Parser::new(data, config).parse().unwrap();

        assert_eq!(sample.pid(), Some(0x08d4));
        assert_eq!(sample.tid(), Some(0x08d4));
        assert_eq!(sample.time(), Some(0xA43AA18E77C9));
        assert_eq!(sample.cpu(), Some(0));

        let group = sample.values().unwrap();
        assert_eq!(group.len(), 2);

        assert_eq!(sample.cgroup(), Some(1));
    }
}