1use bitflags::bitflags;
3use core::fmt::{self, Debug, Display, Formatter};
4use core::mem::size_of;
5use core::slice;
6use core::str;
7
8use crate::{
9 get_bits, CpuIdReader, CpuIdResult, Vendor, EAX_EXTENDED_CPU_TOPOLOGY,
10 EAX_PQOS_EXTENDED_FEATURES,
11};
12
/// Extended Processor and Processor Feature Identifiers (cpuid leaf 0x8000_0001).
///
/// Many of the feature bits in this leaf are AMD-specific, so the CPU
/// `vendor` is stored alongside the raw register values and consulted by
/// the accessor methods.
pub struct ExtendedProcessorFeatureIdentifiers {
    // Captured at construction; AMD-only queries check this.
    vendor: Vendor,
    // Raw EAX (extended processor signature).
    eax: u32,
    // Raw EBX (brand id / package type fields).
    ebx: u32,
    // Recognized ECX feature bits (reserved bits dropped).
    ecx: ExtendedFunctionInfoEcx,
    // Recognized EDX feature bits (reserved bits dropped).
    edx: ExtendedFunctionInfoEdx,
}
24
impl ExtendedProcessorFeatureIdentifiers {
    /// Build the representation from the raw registers of leaf 0x8000_0001.
    ///
    /// The vendor is captured because several accessors below are
    /// AMD-specific and additionally check it.
    pub(crate) fn new(vendor: Vendor, data: CpuIdResult) -> Self {
        Self {
            vendor,
            eax: data.eax,
            ebx: data.ebx,
            // `from_bits_truncate` silently drops reserved/unknown bits.
            ecx: ExtendedFunctionInfoEcx::from_bits_truncate(data.ecx),
            edx: ExtendedFunctionInfoEdx::from_bits_truncate(data.edx),
        }
    }

    /// Extended processor signature: the raw EAX value of this leaf.
    pub fn extended_signature(&self) -> u32 {
        self.eax
    }

    /// Package type field (EBX bits 28..=31). AMD-defined; only shown in
    /// `Debug` output for AMD CPUs.
    pub fn pkg_type(&self) -> u32 {
        get_bits(self.ebx, 28, 31)
    }

    /// Brand ID field (EBX bits 0..=15). AMD-defined; only shown in
    /// `Debug` output for AMD CPUs.
    pub fn brand_id(&self) -> u32 {
        get_bits(self.ebx, 0, 15)
    }

    /// LAHF/SAHF available in 64-bit mode.
    pub fn has_lahf_sahf(&self) -> bool {
        self.ecx.contains(ExtendedFunctionInfoEcx::LAHF_SAHF)
    }

    /// Core multi-processing legacy mode (AMD only; `false` on other vendors).
    pub fn has_cmp_legacy(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::CMP_LEGACY)
    }

    /// Secure Virtual Machine (SVM) supported (AMD only).
    pub fn has_svm(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::SVM)
    }

    /// Extended APIC register space supported (AMD only).
    pub fn has_ext_apic_space(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::EXT_APIC_SPACE)
    }

    /// LOCK MOV CR0 acts as MOV CR8 (AMD only).
    pub fn has_alt_mov_cr8(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::ALTMOVCR8)
    }

    /// LZCNT instruction supported.
    pub fn has_lzcnt(&self) -> bool {
        self.ecx.contains(ExtendedFunctionInfoEcx::LZCNT)
    }

    /// SSE4A instructions supported (AMD only).
    pub fn has_sse4a(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::SSE4A)
    }

    /// Misaligned SSE access mode supported (AMD only).
    pub fn has_misaligned_sse_mode(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::MISALIGNSSE)
    }

    /// PREFETCH/PREFETCHW (3DNow! prefetch) instructions supported.
    pub fn has_prefetchw(&self) -> bool {
        self.ecx.contains(ExtendedFunctionInfoEcx::PREFETCHW)
    }

    /// OS-visible workaround support (AMD only).
    pub fn has_osvw(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::OSVW)
    }

    /// Instruction-based sampling (IBS) supported (AMD only).
    pub fn has_ibs(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::IBS)
    }

    /// XOP instruction set supported (AMD only).
    pub fn has_xop(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::XOP)
    }

    /// SKINIT and STGI supported (AMD only).
    pub fn has_skinit(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::SKINIT)
    }

    /// Watchdog timer supported (AMD only).
    pub fn has_wdt(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::WDT)
    }

    /// Lightweight profiling (LWP) supported (AMD only).
    pub fn has_lwp(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::LWP)
    }

    /// FMA4 instructions supported (AMD only).
    pub fn has_fma4(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::FMA4)
    }

    /// Trailing-bit-manipulation (TBM) instructions supported (AMD only).
    pub fn has_tbm(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::TBM)
    }

    /// Topology extensions (e.g. leaf 0x8000_001E) supported (AMD only).
    pub fn has_topology_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::TOPEXT)
    }

    /// Core performance counter extensions supported (AMD only).
    pub fn has_perf_cntr_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::PERFCTREXT)
    }

    /// Northbridge performance counter extensions supported (AMD only).
    pub fn has_nb_perf_cntr_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::PERFCTREXTNB)
    }

    /// Data-access breakpoint extension supported (AMD only).
    pub fn has_data_access_bkpt_extension(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::DATABRKPEXT)
    }

    /// Performance time-stamp counter supported (AMD only).
    pub fn has_perf_tsc(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::PERFTSC)
    }

    /// Last-level-cache performance counter extensions supported (AMD only).
    pub fn has_perf_cntr_llc_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::PERFCTREXTLLC)
    }

    /// MONITORX/MWAITX instructions supported (AMD only).
    pub fn has_monitorx_mwaitx(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::MONITORX)
    }

    /// Breakpoint address-mask extension supported (AMD only).
    pub fn has_addr_mask_extension(&self) -> bool {
        self.vendor == Vendor::Amd && self.ecx.contains(ExtendedFunctionInfoEcx::ADDRMASKEXT)
    }

    /// SYSCALL/SYSRET instructions supported.
    pub fn has_syscall_sysret(&self) -> bool {
        self.edx.contains(ExtendedFunctionInfoEdx::SYSCALL_SYSRET)
    }

    /// Execute-disable (NX) page protection supported.
    pub fn has_execute_disable(&self) -> bool {
        self.edx.contains(ExtendedFunctionInfoEdx::EXECUTE_DISABLE)
    }

    /// AMD extensions to MMX supported (AMD only).
    pub fn has_mmx_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.edx.contains(ExtendedFunctionInfoEdx::MMXEXT)
    }

    /// FXSAVE/FXRSTOR instruction optimizations supported (AMD only).
    pub fn has_fast_fxsave_fxstor(&self) -> bool {
        self.vendor == Vendor::Amd && self.edx.contains(ExtendedFunctionInfoEdx::FFXSR)
    }

    /// 1-GiB large pages supported.
    pub fn has_1gib_pages(&self) -> bool {
        self.edx.contains(ExtendedFunctionInfoEdx::GIB_PAGES)
    }

    /// RDTSCP instruction supported.
    pub fn has_rdtscp(&self) -> bool {
        self.edx.contains(ExtendedFunctionInfoEdx::RDTSCP)
    }

    /// 64-bit (long) mode supported.
    pub fn has_64bit_mode(&self) -> bool {
        self.edx.contains(ExtendedFunctionInfoEdx::I64BIT_MODE)
    }

    /// AMD extensions to 3DNow! supported (AMD only).
    pub fn has_amd_3dnow_extensions(&self) -> bool {
        self.vendor == Vendor::Amd && self.edx.contains(ExtendedFunctionInfoEdx::THREEDNOWEXT)
    }

    /// 3DNow! instructions supported (AMD only).
    pub fn has_3dnow(&self) -> bool {
        self.vendor == Vendor::Amd && self.edx.contains(ExtendedFunctionInfoEdx::THREEDNOW)
    }
}
374
375impl Debug for ExtendedProcessorFeatureIdentifiers {
376 fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
377 let mut ds = f.debug_struct("ExtendedProcessorFeatureIdentifiers");
378 ds.field("extended_signature", &self.extended_signature());
379
380 if self.vendor == Vendor::Amd {
381 ds.field("pkg_type", &self.pkg_type());
382 ds.field("brand_id", &self.brand_id());
383 }
384 ds.field("ecx_features", &self.ecx);
385 ds.field("edx_features", &self.edx);
386 ds.finish()
387 }
388}
389
bitflags! {
    /// ECX feature bits of cpuid leaf 0x8000_0001.
    ///
    /// See the `has_*` accessors on `ExtendedProcessorFeatureIdentifiers`
    /// for the meaning of each flag. Bit 14 and other gaps are reserved.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ExtendedFunctionInfoEcx: u32 {
        const LAHF_SAHF = 1 << 0;
        const CMP_LEGACY = 1 << 1;
        const SVM = 1 << 2;
        const EXT_APIC_SPACE = 1 << 3;
        const ALTMOVCR8 = 1 << 4;
        const LZCNT = 1 << 5;
        const SSE4A = 1 << 6;
        const MISALIGNSSE = 1 << 7;
        const PREFETCHW = 1 << 8;
        const OSVW = 1 << 9;
        const IBS = 1 << 10;
        const XOP = 1 << 11;
        const SKINIT = 1 << 12;
        const WDT = 1 << 13;
        const LWP = 1 << 15;
        const FMA4 = 1 << 16;
        const TBM = 1 << 21;
        const TOPEXT = 1 << 22;
        const PERFCTREXT = 1 << 23;
        const PERFCTREXTNB = 1 << 24;
        const DATABRKPEXT = 1 << 26;
        const PERFTSC = 1 << 27;
        const PERFCTREXTLLC = 1 << 28;
        const MONITORX = 1 << 29;
        const ADDRMASKEXT = 1 << 30;
    }
}
421
bitflags! {
    /// EDX feature bits of cpuid leaf 0x8000_0001.
    ///
    /// See the `has_*` accessors on `ExtendedProcessorFeatureIdentifiers`
    /// for the meaning of each flag; unlisted bits are reserved or mirror
    /// leaf 1.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ExtendedFunctionInfoEdx: u32 {
        const SYSCALL_SYSRET = 1 << 11;
        const EXECUTE_DISABLE = 1 << 20;
        const MMXEXT = 1 << 22;
        const FFXSR = 1 << 25;
        const GIB_PAGES = 1 << 26;
        const RDTSCP = 1 << 27;
        const I64BIT_MODE = 1 << 29;
        const THREEDNOWEXT = 1 << 30;
        const THREEDNOW = 1 << 31;
    }
}
437
/// The processor brand string (cpuid leafs 0x8000_0002..=0x8000_0004).
pub struct ProcessorBrandString {
    // Raw registers of the three brand-string leafs; the ASCII string is
    // stored directly in the register bytes.
    data: [CpuIdResult; 3],
}
447
impl ProcessorBrandString {
    /// Construct from the raw registers of the three consecutive
    /// brand-string leafs (0x8000_0002..=0x8000_0004), in leaf order.
    pub(crate) fn new(data: [CpuIdResult; 3]) -> Self {
        Self { data }
    }

    /// Return the brand string as a trimmed `&str`.
    ///
    /// Returns `"Invalid Processor Brand String"` if the register bytes up
    /// to the first NUL are not valid UTF-8.
    pub fn as_str(&self) -> &str {
        // SAFETY: `self.data` is a fully-initialized array of `CpuIdResult`
        // values (plain u32 register fields), so reinterpreting its backing
        // storage as `data.len() * size_of::<CpuIdResult>()` bytes stays
        // within one live allocation and every byte is initialized.
        let slice: &[u8] = unsafe {
            slice::from_raw_parts(
                self.data.as_ptr() as *const u8,
                self.data.len() * size_of::<CpuIdResult>(),
            )
        };

        // Keep only the bytes before the first NUL terminator. `split`
        // always yields at least one (possibly empty) sub-slice, so this
        // `unwrap` cannot panic.
        let slice = slice.split(|&x| x == 0).next().unwrap();
        str::from_utf8(slice)
            .unwrap_or("Invalid Processor Brand String")
            .trim()
    }
}
474
475impl Debug for ProcessorBrandString {
476 fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
477 f.debug_struct("ProcessorBrandString")
478 .field("as_str", &self.as_str())
479 .finish()
480 }
481}
482
/// L1 cache and TLB information (cpuid leaf 0x8000_0005; AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct L1CacheTlbInfo {
    // EAX: 2 MiB/4 MiB page TLB fields.
    eax: u32,
    // EBX: 4 KiB page TLB fields.
    ebx: u32,
    // ECX: L1 data-cache fields.
    ecx: u32,
    // EDX: L1 instruction-cache fields.
    edx: u32,
}
494
impl L1CacheTlbInfo {
    /// Build from the raw registers of leaf 0x8000_0005.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: data.ebx,
            ecx: data.ecx,
            edx: data.edx,
        }
    }

    /// Data TLB associativity for 2 MiB/4 MiB pages (EAX bits 24..=31).
    pub fn dtlb_2m_4m_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 24, 31) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// Data TLB entry count for 2 MiB/4 MiB pages (EAX bits 16..=23).
    pub fn dtlb_2m_4m_size(&self) -> u8 {
        get_bits(self.eax, 16, 23) as u8
    }

    /// Instruction TLB associativity for 2 MiB/4 MiB pages (EAX bits 8..=15).
    pub fn itlb_2m_4m_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 8, 15) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// Instruction TLB entry count for 2 MiB/4 MiB pages (EAX bits 0..=7).
    pub fn itlb_2m_4m_size(&self) -> u8 {
        get_bits(self.eax, 0, 7) as u8
    }

    /// Data TLB associativity for 4 KiB pages (EBX bits 24..=31).
    pub fn dtlb_4k_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 24, 31) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// Data TLB entry count for 4 KiB pages (EBX bits 16..=23).
    pub fn dtlb_4k_size(&self) -> u8 {
        get_bits(self.ebx, 16, 23) as u8
    }

    /// Instruction TLB associativity for 4 KiB pages (EBX bits 8..=15).
    pub fn itlb_4k_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 8, 15) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// Instruction TLB entry count for 4 KiB pages (EBX bits 0..=7).
    pub fn itlb_4k_size(&self) -> u8 {
        get_bits(self.ebx, 0, 7) as u8
    }

    /// L1 data-cache size in KiB (ECX bits 24..=31).
    pub fn dcache_size(&self) -> u8 {
        get_bits(self.ecx, 24, 31) as u8
    }

    /// L1 data-cache associativity (ECX bits 16..=23).
    pub fn dcache_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ecx, 16, 23) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// L1 data-cache lines per tag (ECX bits 8..=15).
    pub fn dcache_lines_per_tag(&self) -> u8 {
        get_bits(self.ecx, 8, 15) as u8
    }

    /// L1 data-cache line size in bytes (ECX bits 0..=7).
    pub fn dcache_line_size(&self) -> u8 {
        get_bits(self.ecx, 0, 7) as u8
    }

    /// L1 instruction-cache size in KiB (EDX bits 24..=31).
    pub fn icache_size(&self) -> u8 {
        get_bits(self.edx, 24, 31) as u8
    }

    /// L1 instruction-cache associativity (EDX bits 16..=23).
    pub fn icache_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.edx, 16, 23) as u8;
        Associativity::for_l1(assoc_bits)
    }

    /// L1 instruction-cache lines per tag (EDX bits 8..=15).
    pub fn icache_lines_per_tag(&self) -> u8 {
        get_bits(self.edx, 8, 15) as u8
    }

    /// L1 instruction-cache line size in bytes (EDX bits 0..=7).
    pub fn icache_line_size(&self) -> u8 {
        get_bits(self.edx, 0, 7) as u8
    }
}
599
/// L2/L3 cache and L2 TLB information (cpuid leaf 0x8000_0006; AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct L2And3CacheTlbInfo {
    // EAX: L2 TLB fields for 2 MiB/4 MiB pages.
    eax: u32,
    // EBX: L2 TLB fields for 4 KiB pages.
    ebx: u32,
    // ECX: L2 cache fields.
    ecx: u32,
    // EDX: L3 cache fields.
    edx: u32,
}
611
impl L2And3CacheTlbInfo {
    /// Build from the raw registers of leaf 0x8000_0006.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: data.ebx,
            ecx: data.ecx,
            edx: data.edx,
        }
    }

    /// L2 data TLB associativity for 2 MiB/4 MiB pages (EAX bits 28..=31).
    pub fn dtlb_2m_4m_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 28, 31) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 data TLB entry count for 2 MiB/4 MiB pages (EAX bits 16..=27).
    pub fn dtlb_2m_4m_size(&self) -> u16 {
        get_bits(self.eax, 16, 27) as u16
    }

    /// L2 instruction TLB associativity for 2 MiB/4 MiB pages (EAX bits 12..=15).
    pub fn itlb_2m_4m_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 12, 15) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 instruction TLB entry count for 2 MiB/4 MiB pages (EAX bits 0..=11).
    pub fn itlb_2m_4m_size(&self) -> u16 {
        get_bits(self.eax, 0, 11) as u16
    }

    /// L2 data TLB associativity for 4 KiB pages (EBX bits 28..=31).
    pub fn dtlb_4k_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 28, 31) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 data TLB entry count for 4 KiB pages (EBX bits 16..=27).
    pub fn dtlb_4k_size(&self) -> u16 {
        get_bits(self.ebx, 16, 27) as u16
    }

    /// L2 instruction TLB associativity for 4 KiB pages (EBX bits 12..=15).
    pub fn itlb_4k_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 12, 15) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 instruction TLB entry count for 4 KiB pages (EBX bits 0..=11).
    pub fn itlb_4k_size(&self) -> u16 {
        get_bits(self.ebx, 0, 11) as u16
    }

    /// L2 cache line size in bytes (ECX bits 0..=7).
    pub fn l2cache_line_size(&self) -> u8 {
        get_bits(self.ecx, 0, 7) as u8
    }

    /// L2 cache lines per tag (ECX bits 8..=11).
    pub fn l2cache_lines_per_tag(&self) -> u8 {
        get_bits(self.ecx, 8, 11) as u8
    }

    /// L2 cache associativity (ECX bits 12..=15).
    pub fn l2cache_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ecx, 12, 15) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 cache size in KiB (ECX bits 16..=31).
    pub fn l2cache_size(&self) -> u16 {
        get_bits(self.ecx, 16, 31) as u16
    }

    /// L3 cache line size in bytes (EDX bits 0..=7).
    pub fn l3cache_line_size(&self) -> u8 {
        get_bits(self.edx, 0, 7) as u8
    }

    /// L3 cache lines per tag (EDX bits 8..=11).
    pub fn l3cache_lines_per_tag(&self) -> u8 {
        get_bits(self.edx, 8, 11) as u8
    }

    /// L3 cache associativity (EDX bits 12..=15).
    pub fn l3cache_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.edx, 12, 15) as u8;
        Associativity::for_l3(assoc_bits)
    }

    /// L3 cache size field (EDX bits 18..=31); per AMD this is expressed in
    /// 512 KiB units.
    pub fn l3cache_size(&self) -> u16 {
        get_bits(self.edx, 18, 31) as u16
    }
}
766
/// Decoded cache/TLB associativity, shared by the L1 and L2/L3 info leafs.
#[derive(PartialEq, Eq, Debug)]
pub enum Associativity {
    /// Cache/TLB is disabled (encoding 0x0).
    Disabled,
    /// Direct-mapped (encoding 0x1).
    DirectMapped,
    /// N-way set associative.
    NWay(u8),
    /// Fully associative.
    FullyAssociative,
    /// Value not decodable from this leaf; consult leaf 0x8000_001d.
    Unknown,
}
776
777impl Display for Associativity {
778 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
779 let s = match self {
780 Associativity::Disabled => "Disabled",
781 Associativity::DirectMapped => "Direct mapped",
782 Associativity::NWay(n) => {
783 return write!(f, "NWay({})", n);
784 }
785 Associativity::FullyAssociative => "Fully associative",
786 Associativity::Unknown => "Unknown (check leaf 0x8000_001d)",
787 };
788 f.write_str(s)
789 }
790}
791
792impl Associativity {
793 fn for_l1(n: u8) -> Associativity {
795 match n {
796 0x0 => Associativity::Disabled, 0x1 => Associativity::DirectMapped,
798 0x2..=0xfe => Associativity::NWay(n),
799 0xff => Associativity::FullyAssociative,
800 }
801 }
802
803 fn for_l2(n: u8) -> Associativity {
805 match n {
806 0x0 => Associativity::Disabled,
807 0x1 => Associativity::DirectMapped,
808 0x2 => Associativity::NWay(2),
809 0x4 => Associativity::NWay(4),
810 0x5 => Associativity::NWay(6), 0x6 => Associativity::NWay(8),
812 0x8 => Associativity::NWay(16),
814 0x9 => Associativity::Unknown, 0xa => Associativity::NWay(32),
816 0xb => Associativity::NWay(48),
817 0xc => Associativity::NWay(64),
818 0xd => Associativity::NWay(96),
819 0xe => Associativity::NWay(128),
820 0xF => Associativity::FullyAssociative,
821 _ => Associativity::Unknown,
822 }
823 }
824
825 fn for_l3(n: u8) -> Associativity {
827 Associativity::for_l2(n)
828 }
829}
830
/// Processor power management and RAS capabilities (cpuid leaf 0x8000_0007).
#[derive(Debug, PartialEq, Eq)]
pub struct ApmInfo {
    // EAX is reserved in this leaf; kept for completeness.
    _eax: u32,
    // EBX: RAS capability bits.
    ebx: RasCapabilities,
    // ECX: compute-unit power sample time ratio (raw value).
    ecx: u32,
    // EDX: advanced power management feature bits.
    edx: ApmInfoEdx,
}
843
impl ApmInfo {
    /// Build from the raw registers of leaf 0x8000_0007.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            _eax: data.eax,
            ebx: RasCapabilities::from_bits_truncate(data.ebx),
            ecx: data.ecx,
            edx: ApmInfoEdx::from_bits_truncate(data.edx),
        }
    }

    /// MCA overflow recovery supported.
    pub fn has_mca_overflow_recovery(&self) -> bool {
        self.ebx.contains(RasCapabilities::MCAOVFLRECOV)
    }

    /// Software uncorrectable error containment and recovery (SUCCOR)
    /// supported.
    pub fn has_succor(&self) -> bool {
        self.ebx.contains(RasCapabilities::SUCCOR)
    }

    /// Hardware assert (HWA) supported.
    pub fn has_hwa(&self) -> bool {
        self.ebx.contains(RasCapabilities::HWA)
    }

    /// Raw ECX value: per AMD, the ratio of the compute-unit power
    /// accumulator sample period to the TSC counter period.
    pub fn cpu_pwr_sample_time_ratio(&self) -> u32 {
        self.ecx
    }

    /// Temperature sensor supported.
    pub fn has_ts(&self) -> bool {
        self.edx.contains(ApmInfoEdx::TS)
    }

    /// Frequency ID control supported (legacy; AMD-defined).
    pub fn has_freq_id_ctrl(&self) -> bool {
        self.edx.contains(ApmInfoEdx::FID)
    }

    /// Voltage ID control supported (legacy; AMD-defined).
    pub fn has_volt_id_ctrl(&self) -> bool {
        self.edx.contains(ApmInfoEdx::VID)
    }

    /// THERMTRIP supported.
    pub fn has_thermtrip(&self) -> bool {
        self.edx.contains(ApmInfoEdx::TTP)
    }

    /// Hardware thermal control supported.
    pub fn has_tm(&self) -> bool {
        self.edx.contains(ApmInfoEdx::TM)
    }

    /// 100 MHz multiplier control steps supported.
    pub fn has_100mhz_steps(&self) -> bool {
        self.edx.contains(ApmInfoEdx::MHZSTEPS100)
    }

    /// Hardware P-state control supported.
    pub fn has_hw_pstate(&self) -> bool {
        self.edx.contains(ApmInfoEdx::HWPSTATE)
    }

    /// TSC rate is invariant across P-states, C-states, etc.
    pub fn has_invariant_tsc(&self) -> bool {
        self.edx.contains(ApmInfoEdx::INVTSC)
    }

    /// Core performance boost supported.
    pub fn has_cpb(&self) -> bool {
        self.edx.contains(ApmInfoEdx::CPB)
    }

    /// Read-only effective frequency interface supported.
    pub fn has_ro_effective_freq_iface(&self) -> bool {
        self.edx.contains(ApmInfoEdx::EFFFREQRO)
    }

    /// Processor feedback interface supported.
    pub fn has_feedback_iface(&self) -> bool {
        self.edx.contains(ApmInfoEdx::PROCFEEDBACKIF)
    }

    /// Processor power reporting interface supported.
    pub fn has_power_reporting_iface(&self) -> bool {
        self.edx.contains(ApmInfoEdx::PROCPWRREPORT)
    }
}
1011
bitflags! {
    /// EDX bits of cpuid leaf 0x8000_0007 (advanced power management).
    ///
    /// See the `has_*` accessors on `ApmInfo` for flag semantics; bit 5 is
    /// reserved.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ApmInfoEdx: u32 {
        const TS = 1 << 0;
        const FID = 1 << 1;
        const VID = 1 << 2;
        const TTP = 1 << 3;
        const TM = 1 << 4;
        const MHZSTEPS100 = 1 << 6;
        const HWPSTATE = 1 << 7;
        const INVTSC = 1 << 8;
        const CPB = 1 << 9;
        const EFFFREQRO = 1 << 10;
        const PROCFEEDBACKIF = 1 << 11;
        const PROCPWRREPORT = 1 << 12;
    }
}
1030
bitflags! {
    /// EBX bits of cpuid leaf 0x8000_0007 (RAS capabilities).
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct RasCapabilities: u32 {
        /// MCA overflow recovery.
        const MCAOVFLRECOV = 1 << 0;
        /// Software uncorrectable error containment and recovery.
        const SUCCOR = 1 << 1;
        /// Hardware assert.
        const HWA = 1 << 2;
    }
}
1040
/// Processor capacity parameters and extended features
/// (cpuid leaf 0x8000_0008; AMD-defined).
#[derive(PartialEq, Eq)]
pub struct ProcessorCapacityAndFeatureInfo {
    // EAX: address size fields.
    eax: u32,
    // EBX: extended feature bits.
    ebx: ProcessorCapacityAndFeatureEbx,
    // ECX: core count / APIC ID size / perf-TSC size fields.
    ecx: u32,
    // EDX: INVLPGB / RDPRU limits.
    edx: u32,
}
1057
impl ProcessorCapacityAndFeatureInfo {
    /// Build from the raw registers of leaf 0x8000_0008.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: ProcessorCapacityAndFeatureEbx::from_bits_truncate(data.ebx),
            ecx: data.ecx,
            edx: data.edx,
        }
    }

    /// Maximum physical address width in bits (EAX bits 0..=7).
    pub fn physical_address_bits(&self) -> u8 {
        get_bits(self.eax, 0, 7) as u8
    }

    /// Maximum linear (virtual) address width in bits (EAX bits 8..=15).
    pub fn linear_address_bits(&self) -> u8 {
        get_bits(self.eax, 8, 15) as u8
    }

    /// Maximum guest physical address width in bits (EAX bits 16..=23).
    /// Per AMD, 0 means it equals `physical_address_bits()`.
    pub fn guest_physical_address_bits(&self) -> u8 {
        get_bits(self.eax, 16, 23) as u8
    }

    /// CLZERO instruction supported.
    pub fn has_cl_zero(&self) -> bool {
        self.ebx.contains(ProcessorCapacityAndFeatureEbx::CLZERO)
    }

    /// Instructions-retired counter MSR supported.
    pub fn has_inst_ret_cntr_msr(&self) -> bool {
        self.ebx
            .contains(ProcessorCapacityAndFeatureEbx::INST_RETCNT_MSR)
    }

    /// FP error pointers are always restored by XRSTOR/FXRSTOR.
    pub fn has_restore_fp_error_ptrs(&self) -> bool {
        self.ebx
            .contains(ProcessorCapacityAndFeatureEbx::RSTR_FP_ERR_PTRS)
    }

    /// INVLPGB and TLBSYNC instructions supported.
    pub fn has_invlpgb(&self) -> bool {
        self.ebx.contains(ProcessorCapacityAndFeatureEbx::INVLPGB)
    }

    /// RDPRU instruction supported.
    pub fn has_rdpru(&self) -> bool {
        self.ebx.contains(ProcessorCapacityAndFeatureEbx::RDPRU)
    }

    /// MCOMMIT instruction supported.
    pub fn has_mcommit(&self) -> bool {
        self.ebx.contains(ProcessorCapacityAndFeatureEbx::MCOMMIT)
    }

    /// WBNOINVD instruction supported.
    pub fn has_wbnoinvd(&self) -> bool {
        self.ebx.contains(ProcessorCapacityAndFeatureEbx::WBNOINVD)
    }

    /// WBINVD/WBNOINVD are interruptible.
    pub fn has_int_wbinvd(&self) -> bool {
        self.ebx
            .contains(ProcessorCapacityAndFeatureEbx::INT_WBINVD)
    }

    /// EFER.LMSLE is unsupported on this processor.
    pub fn has_unsupported_efer_lmsle(&self) -> bool {
        self.ebx
            .contains(ProcessorCapacityAndFeatureEbx::EFER_LMSLE_UNSUPP)
    }

    /// INVLPGB with nested translation supported.
    pub fn has_invlpgb_nested(&self) -> bool {
        self.ebx
            .contains(ProcessorCapacityAndFeatureEbx::INVLPGB_NESTED)
    }

    /// Width of the performance time-stamp counter in bits, decoded from
    /// ECX bits 16..=17 (0b00 => 40 ... 0b11 => 64).
    pub fn perf_tsc_size(&self) -> usize {
        let s = get_bits(self.ecx, 16, 17) as u8;
        // `s` is already a 2-bit field; the mask makes the match visibly
        // exhaustive.
        match s & 0b11 {
            0b00 => 40,
            0b01 => 48,
            0b10 => 56,
            0b11 => 64,
            _ => unreachable!("AND with 0b11 in match"),
        }
    }

    /// Number of APIC ID bits (ECX bits 12..=15).
    // NOTE(review): per AMD, a value of 0 means software should fall back
    // to the legacy method (leaf 1); `maximum_logical_processors()` below
    // does not special-case 0 and would report 1 -- confirm intended.
    pub fn apic_id_size(&self) -> u8 {
        get_bits(self.ecx, 12, 15) as u8
    }

    /// Maximum number of logical processors derivable from the APIC ID
    /// size: `2^apic_id_size()`.
    pub fn maximum_logical_processors(&self) -> usize {
        usize::pow(2, self.apic_id_size() as u32)
    }

    /// Number of physical threads in the processor: ECX bits 0..=7 plus 1.
    pub fn num_phys_threads(&self) -> usize {
        get_bits(self.ecx, 0, 7) as usize + 1
    }

    /// Maximum page count for INVLPGB (EDX bits 0..=15).
    pub fn invlpgb_max_pages(&self) -> u16 {
        get_bits(self.edx, 0, 15) as u16
    }

    /// Maximum register ID accepted by RDPRU (EDX bits 16..=31).
    pub fn max_rdpru_id(&self) -> u16 {
        get_bits(self.edx, 16, 31) as u16
    }
}
1247
1248impl Debug for ProcessorCapacityAndFeatureInfo {
1249 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1250 f.debug_struct("ProcessorCapacityAndFeatureInfo")
1251 .field("physical_address_bits", &self.physical_address_bits())
1252 .field("linear_address_bits", &self.linear_address_bits())
1253 .field(
1254 "guest_physical_address_bits",
1255 &self.guest_physical_address_bits(),
1256 )
1257 .field("has_cl_zero", &self.has_cl_zero())
1258 .field("has_inst_ret_cntr_msr", &self.has_inst_ret_cntr_msr())
1259 .field(
1260 "has_restore_fp_error_ptrs",
1261 &self.has_restore_fp_error_ptrs(),
1262 )
1263 .field("has_invlpgb", &self.has_invlpgb())
1264 .field("has_rdpru", &self.has_rdpru())
1265 .field("has_mcommit", &self.has_mcommit())
1266 .field("has_wbnoinvd", &self.has_wbnoinvd())
1267 .field("has_int_wbinvd", &self.has_int_wbinvd())
1268 .field(
1269 "has_unsupported_efer_lmsle",
1270 &self.has_unsupported_efer_lmsle(),
1271 )
1272 .field("has_invlpgb_nested", &self.has_invlpgb_nested())
1273 .field("perf_tsc_size", &self.perf_tsc_size())
1274 .field("apic_id_size", &self.apic_id_size())
1275 .field(
1276 "maximum_logical_processors",
1277 &self.maximum_logical_processors(),
1278 )
1279 .field("num_phys_threads", &self.num_phys_threads())
1280 .field("invlpgb_max_pages", &self.invlpgb_max_pages())
1281 .field("max_rdpru_id", &self.max_rdpru_id())
1282 .finish()
1283 }
1284}
1285
bitflags! {
    /// EBX bits of cpuid leaf 0x8000_0008 (extended feature identifiers).
    ///
    /// See the `has_*` accessors on `ProcessorCapacityAndFeatureInfo` for
    /// flag semantics; gaps are reserved bits.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ProcessorCapacityAndFeatureEbx: u32 {
        const CLZERO = 1 << 0;
        const INST_RETCNT_MSR = 1 << 1;
        const RSTR_FP_ERR_PTRS = 1 << 2;
        const INVLPGB = 1 << 3;
        const RDPRU = 1 << 4;
        const MCOMMIT = 1 << 8;
        const WBNOINVD = 1 << 9;
        const INT_WBINVD = 1 << 13;
        const EFER_LMSLE_UNSUPP = 1 << 20;
        const INVLPGB_NESTED = 1 << 21;
    }
}
1302
/// SVM revision and feature identification (cpuid leaf 0x8000_000A;
/// AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct SvmFeatures {
    // EAX: SVM revision number.
    eax: u32,
    // EBX: number of supported ASIDs.
    ebx: u32,
    // ECX is reserved in this leaf.
    _ecx: u32,
    // EDX: SVM feature bits.
    edx: SvmFeaturesEdx,
}
1319
impl SvmFeatures {
    /// Build from the raw registers of leaf 0x8000_000A.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: data.ebx,
            _ecx: data.ecx,
            edx: SvmFeaturesEdx::from_bits_truncate(data.edx),
        }
    }

    /// SVM revision number (EAX bits 0..=7).
    pub fn revision(&self) -> u8 {
        get_bits(self.eax, 0, 7) as u8
    }

    /// Number of supported address space identifiers (ASIDs), raw EBX.
    pub fn supported_asids(&self) -> u32 {
        self.ebx
    }

    /// Nested paging supported.
    pub fn has_nested_paging(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::NP)
    }

    /// LBR virtualization supported.
    pub fn has_lbr_virtualization(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::LBR_VIRT)
    }

    /// SVM lock supported.
    pub fn has_svm_lock(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::SVML)
    }

    /// Next-RIP save on #VMEXIT supported.
    pub fn has_nrip(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::NRIPS)
    }

    /// MSR-based TSC rate control supported.
    pub fn has_tsc_rate_msr(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::TSC_RATE_MSR)
    }

    /// VMCB clean bits supported.
    pub fn has_vmcb_clean_bits(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::VMCB_CLEAN)
    }

    /// TLB flush selectable by ASID supported.
    pub fn has_flush_by_asid(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::FLUSH_BY_ASID)
    }

    /// Decode assists supported.
    pub fn has_decode_assists(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::DECODE_ASSISTS)
    }

    /// PAUSE intercept filter supported.
    pub fn has_pause_filter(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::PAUSE_FILTER)
    }

    /// PAUSE filter cycle-count threshold supported.
    pub fn has_pause_filter_threshold(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::PAUSE_FILTER_THRESHOLD)
    }

    /// Advanced Virtual Interrupt Controller (AVIC) supported.
    pub fn has_avic(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::AVIC)
    }

    /// VMSAVE/VMLOAD virtualization supported.
    pub fn has_vmsave_virtualization(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::VMSAVE_VIRT)
    }

    /// Virtualized global interrupt flag (GIF) supported.
    pub fn has_gif(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::VGIF)
    }

    /// Guest mode execute trap (GMET) supported.
    pub fn has_gmet(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::GMET)
    }

    /// Supervisor shadow stack (SSS) restriction check supported.
    pub fn has_sss_check(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::SSS_CHECK)
    }

    /// SPEC_CTRL virtualization supported.
    pub fn has_spec_ctrl(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::SPEC_CTRL)
    }

    /// Host MCE override supported.
    pub fn has_host_mce_override(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::HOST_MCE_OVERRIDE)
    }

    /// INVLPGB/TLBSYNC hypervisor enable (TLB_CTL) supported.
    pub fn has_tlb_ctrl(&self) -> bool {
        self.edx.contains(SvmFeaturesEdx::TLB_CTL)
    }
}
1435
bitflags! {
    /// EDX bits of cpuid leaf 0x8000_000A (SVM features).
    ///
    /// See the `has_*` accessors on `SvmFeatures` for flag semantics; gaps
    /// are reserved bits.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct SvmFeaturesEdx: u32 {
        const NP = 1 << 0;
        const LBR_VIRT = 1 << 1;
        const SVML = 1 << 2;
        const NRIPS = 1 << 3;
        const TSC_RATE_MSR = 1 << 4;
        const VMCB_CLEAN = 1 << 5;
        const FLUSH_BY_ASID = 1 << 6;
        const DECODE_ASSISTS = 1 << 7;
        const PAUSE_FILTER = 1 << 10;
        const PAUSE_FILTER_THRESHOLD = 1 << 12;
        const AVIC = 1 << 13;
        const VMSAVE_VIRT = 1 << 15;
        const VGIF = 1 << 16;
        const GMET = 1 << 17;
        const SSS_CHECK = 1 << 19;
        const SPEC_CTRL = 1 << 20;
        const HOST_MCE_OVERRIDE = 1 << 23;
        const TLB_CTL = 1 << 24;
    }
}
1460
/// TLB information for 1-GiB pages (cpuid leaf 0x8000_0019; AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct Tlb1gbPageInfo {
    // EAX: L1 TLB fields for 1-GiB pages.
    eax: u32,
    // EBX: L2 TLB fields for 1-GiB pages.
    ebx: u32,
    // ECX is reserved in this leaf.
    _ecx: u32,
    // EDX is reserved in this leaf.
    _edx: u32,
}
1474
impl Tlb1gbPageInfo {
    /// Build from the raw registers of leaf 0x8000_0019.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: data.ebx,
            _ecx: data.ecx,
            _edx: data.edx,
        }
    }

    /// L1 data TLB associativity for 1-GiB pages (EAX bits 28..=31).
    pub fn dtlb_l1_1gb_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 28, 31) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L1 data TLB entry count for 1-GiB pages (EAX bits 16..=27).
    // NOTE(review): the field is 12 bits wide but the return type is `u8`,
    // so counts above 255 would be truncated -- confirm against upstream
    // before relying on large values.
    pub fn dtlb_l1_1gb_size(&self) -> u8 {
        get_bits(self.eax, 16, 27) as u8
    }

    /// L1 instruction TLB associativity for 1-GiB pages (EAX bits 12..=15).
    pub fn itlb_l1_1gb_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.eax, 12, 15) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L1 instruction TLB entry count for 1-GiB pages (EAX bits 0..=11).
    // NOTE(review): 12-bit field returned as `u8`; see dtlb_l1_1gb_size.
    pub fn itlb_l1_1gb_size(&self) -> u8 {
        get_bits(self.eax, 0, 11) as u8
    }

    /// L2 data TLB associativity for 1-GiB pages (EBX bits 28..=31).
    pub fn dtlb_l2_1gb_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 28, 31) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 data TLB entry count for 1-GiB pages (EBX bits 16..=27).
    // NOTE(review): 12-bit field returned as `u8`; see dtlb_l1_1gb_size.
    pub fn dtlb_l2_1gb_size(&self) -> u8 {
        get_bits(self.ebx, 16, 27) as u8
    }

    /// L2 instruction TLB associativity for 1-GiB pages (EBX bits 12..=15).
    pub fn itlb_l2_1gb_associativity(&self) -> Associativity {
        let assoc_bits = get_bits(self.ebx, 12, 15) as u8;
        Associativity::for_l2(assoc_bits)
    }

    /// L2 instruction TLB entry count for 1-GiB pages (EBX bits 0..=11).
    // NOTE(review): 12-bit field returned as `u8`; see dtlb_l1_1gb_size.
    pub fn itlb_l2_1gb_size(&self) -> u8 {
        get_bits(self.ebx, 0, 11) as u8
    }
}
1529
/// Performance optimization identifiers (cpuid leaf 0x8000_001A;
/// AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct PerformanceOptimizationInfo {
    // EAX: performance optimization feature bits.
    eax: PerformanceOptimizationInfoEax,
    // EBX is reserved in this leaf.
    _ebx: u32,
    // ECX is reserved in this leaf.
    _ecx: u32,
    // EDX is reserved in this leaf.
    _edx: u32,
}
1544
impl PerformanceOptimizationInfo {
    /// Build from the raw registers of leaf 0x8000_001A.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: PerformanceOptimizationInfoEax::from_bits_truncate(data.eax),
            _ebx: data.ebx,
            _ecx: data.ecx,
            _edx: data.edx,
        }
    }

    /// The internal FP/SIMD datapath is 128 bits wide.
    pub fn has_fp128(&self) -> bool {
        self.eax.contains(PerformanceOptimizationInfoEax::FP128)
    }

    /// MOVU* instructions are preferred over MOVL/MOVH forms.
    pub fn has_movu(&self) -> bool {
        self.eax.contains(PerformanceOptimizationInfoEax::MOVU)
    }

    /// The internal FP/SIMD datapath is 256 bits wide.
    pub fn has_fp256(&self) -> bool {
        self.eax.contains(PerformanceOptimizationInfoEax::FP256)
    }
}
1571
bitflags! {
    /// EAX bits of cpuid leaf 0x8000_001A (performance optimization).
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct PerformanceOptimizationInfoEax: u32 {
        /// 128-bit internal FP/SIMD datapath.
        const FP128 = 1 << 0;
        /// Prefer MOVU* over MOVL/MOVH forms.
        const MOVU = 1 << 1;
        /// 256-bit internal FP/SIMD datapath.
        const FP256 = 1 << 2;
    }
}
1581
/// Instruction-based sampling (IBS) capabilities
/// (cpuid leaf 0x8000_001B; AMD-defined).
#[derive(PartialEq, Eq, Debug)]
pub struct InstructionBasedSamplingCapabilities {
    // EAX: IBS capability bits.
    eax: InstructionBasedSamplingCapabilitiesEax,
    // EBX is reserved in this leaf.
    _ebx: u32,
    // ECX is reserved in this leaf.
    _ecx: u32,
    // EDX is reserved in this leaf.
    _edx: u32,
}
1596
impl InstructionBasedSamplingCapabilities {
    /// Build from the raw registers of leaf 0x8000_001B.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: InstructionBasedSamplingCapabilitiesEax::from_bits_truncate(data.eax),
            _ebx: data.ebx,
            _ecx: data.ecx,
            _edx: data.edx,
        }
    }

    /// IBS feature-flags are valid (IBSFFV).
    pub fn has_feature_flags(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::IBSFFV)
    }

    /// IBS fetch sampling supported.
    pub fn has_fetch_sampling(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::FETCH_SAM)
    }

    /// IBS execution (op) sampling supported.
    pub fn has_execution_sampling(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::OP_SAM)
    }

    /// Reading/writing of the op counter supported.
    pub fn has_read_write_operation_counter(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::RD_WR_OP_CNT)
    }

    /// Op-counting mode supported.
    pub fn has_operation_counter(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::OP_CNT)
    }

    /// Branch target address reporting supported.
    pub fn has_branch_target_address_reporting(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::BRN_TRGT)
    }

    /// Extended (wider) op counter supported.
    pub fn has_operation_counter_extended(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::OP_CNT_EXT)
    }

    /// Invalid-RIP indication supported.
    pub fn has_invalid_rip_indication(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::RIP_INVALID_CHK)
    }

    /// Fused branch micro-op indication supported.
    pub fn has_fused_branch_micro_op_indication(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::OP_BRN_FUSE)
    }

    /// IBS L3-miss filtering supported.
    pub fn has_l3_miss_filtering(&self) -> bool {
        self.eax
            .contains(InstructionBasedSamplingCapabilitiesEax::IBS_L3_MISS_FILTERING)
    }
}
1667
bitflags! {
    /// EAX bits of cpuid leaf 0x8000_001B (IBS capabilities).
    ///
    /// See the `has_*` accessors on `InstructionBasedSamplingCapabilities`
    /// for flag semantics; bits 9..=10 are reserved.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct InstructionBasedSamplingCapabilitiesEax: u32 {
        const IBSFFV = 1 << 0;
        const FETCH_SAM = 1 << 1;
        const OP_SAM = 1 << 2;
        const RD_WR_OP_CNT = 1 << 3;
        const OP_CNT = 1 << 4;
        const BRN_TRGT = 1 << 5;
        const OP_CNT_EXT = 1 << 6;
        const RIP_INVALID_CHK = 1 << 7;
        const OP_BRN_FUSE = 1 << 8;
        const IBS_L3_MISS_FILTERING = 1 << 11;
    }
}
1684
/// Processor topology information (cpuid leaf 0x8000_001E; AMD-defined).
#[derive(PartialEq, Eq)]
pub struct ProcessorTopologyInfo {
    // EAX: extended APIC ID.
    eax: u32,
    // EBX: core ID / threads-per-core fields.
    ebx: u32,
    // ECX: node ID / nodes-per-processor fields.
    ecx: u32,
    // EDX is reserved in this leaf.
    _edx: u32,
}
1697
impl ProcessorTopologyInfo {
    /// Build from the raw registers of leaf 0x8000_001E.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: data.eax,
            ebx: data.ebx,
            ecx: data.ecx,
            _edx: data.edx,
        }
    }

    /// Extended (x2APIC) APIC ID: the raw EAX value.
    pub fn x2apic_id(&self) -> u32 {
        self.eax
    }

    /// Core ID (EBX bits 0..=7).
    pub fn core_id(&self) -> u8 {
        get_bits(self.ebx, 0, 7) as u8
    }

    /// Threads per core: EBX bits 8..=15 plus 1.
    pub fn threads_per_core(&self) -> u8 {
        get_bits(self.ebx, 8, 15) as u8 + 1
    }

    /// Node ID (ECX bits 0..=7).
    pub fn node_id(&self) -> u8 {
        get_bits(self.ecx, 0, 7) as u8
    }

    /// Nodes per processor: ECX bits 8..=10 plus 1.
    pub fn nodes_per_processor(&self) -> u8 {
        get_bits(self.ecx, 8, 10) as u8 + 1
    }
}
1739
1740impl Debug for ProcessorTopologyInfo {
1741 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1742 f.debug_struct("ProcessorTopologyInfo")
1743 .field("x2apic_id", &self.x2apic_id())
1744 .field("core_id", &self.core_id())
1745 .field("threads_per_core", &self.threads_per_core())
1746 .field("node_id", &self.node_id())
1747 .field("nodes_per_processor", &self.nodes_per_processor())
1748 .finish()
1749 }
1750}
1751
/// Encrypted memory capabilities: SME/SEV and related features
/// (cpuid leaf 0x8000_001F; AMD-defined).
#[derive(Debug, PartialEq, Eq)]
pub struct MemoryEncryptionInfo {
    // EAX: memory-encryption feature bits.
    eax: MemoryEncryptionInfoEax,
    // EBX: C-bit position and physical address reduction fields.
    ebx: u32,
    // ECX: number of encrypted guests supported.
    ecx: u32,
    // EDX: minimum ASID for a SEV-enabled, SEV-ES-disabled guest.
    edx: u32,
}
1763
impl MemoryEncryptionInfo {
    /// Build from the raw registers of leaf 0x8000_001F.
    pub(crate) fn new(data: CpuIdResult) -> Self {
        Self {
            eax: MemoryEncryptionInfoEax::from_bits_truncate(data.eax),
            ebx: data.ebx,
            ecx: data.ecx,
            edx: data.edx,
        }
    }

    /// Secure Memory Encryption (SME) supported.
    pub fn has_sme(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::SME)
    }

    /// Secure Encrypted Virtualization (SEV) supported.
    pub fn has_sev(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::SEV)
    }

    /// Page-flush MSR available.
    pub fn has_page_flush_msr(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::PAGE_FLUSH_MSR)
    }

    /// SEV with Encrypted State (SEV-ES) supported.
    pub fn has_sev_es(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::SEV_ES)
    }

    /// SEV Secure Nested Paging (SEV-SNP) supported.
    pub fn has_sev_snp(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::SEV_SNP)
    }

    /// VM permission levels (VMPL) supported.
    pub fn has_vmpl(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::VMPL)
    }

    /// Hardware-enforced cache coherency across encryption domains.
    pub fn has_hw_enforced_cache_coh(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::HWENFCACHECOH)
    }

    /// 64-bit (SEV) host supported.
    pub fn has_64bit_mode(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::HOST64)
    }

    /// Restricted injection supported.
    pub fn has_restricted_injection(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::RESTINJECT)
    }

    /// Alternate injection supported.
    pub fn has_alternate_injection(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::ALTINJECT)
    }

    /// Debug register swap for SEV-ES guests supported.
    pub fn has_debug_swap(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::DBGSWP)
    }

    /// Preventing host IBS for a SEV-ES guest supported.
    pub fn has_prevent_host_ibs(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::PREVHOSTIBS)
    }

    /// Virtual transparent encryption (VTE) supported.
    pub fn has_vte(&self) -> bool {
        self.eax.contains(MemoryEncryptionInfoEax::VTE)
    }

    /// Page-table bit position used to mark pages as encrypted
    /// (EBX bits 0..=5).
    pub fn c_bit_position(&self) -> u8 {
        get_bits(self.ebx, 0, 5) as u8
    }

    /// Physical address width reduction, in bits, when SME is enabled
    /// (EBX bits 6..=11).
    pub fn physical_address_reduction(&self) -> u8 {
        get_bits(self.ebx, 6, 11) as u8
    }

    /// Number of encrypted guests supported simultaneously (raw ECX).
    pub fn max_encrypted_guests(&self) -> u32 {
        self.ecx
    }

    /// Minimum ASID value for an SEV-enabled, SEV-ES-disabled guest
    /// (raw EDX).
    pub fn min_sev_no_es_asid(&self) -> u32 {
        self.edx
    }
}
1859
bitflags! {
    /// EAX feature bits of the memory-encryption leaf; consumed by the
    /// `has_*` accessors on [`MemoryEncryptionInfo`].
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct MemoryEncryptionInfoEax: u32 {
        const SME = 1 << 0;
        const SEV = 1 << 1;
        const PAGE_FLUSH_MSR = 1 << 2;
        const SEV_ES = 1 << 3;
        const SEV_SNP = 1 << 4;
        const VMPL = 1 << 5;
        // Bits 6..=9 are not modeled here.
        const HWENFCACHECOH = 1 << 10;
        const HOST64 = 1 << 11;
        const RESTINJECT = 1 << 12;
        const ALTINJECT = 1 << 13;
        const DBGSWP = 1 << 14;
        const PREVHOSTIBS = 1 << 15;
        const VTE = 1 << 16;
    }
}
1879
/// PQOS extended features (leaf `EAX_PQOS_EXTENDED_FEATURES`, sub-leaf 0).
///
/// Keeps the `read` handle so the `get_*` accessors can query further
/// sub-leaves on demand.
#[derive(PartialEq, Eq)]
pub struct PqosExtendedFeatureInfo<R: CpuIdReader> {
    read: R, // reader used for the sub-leaf queries
    _eax: u32,
    ebx: PqosExtendedFeatureInfoEbx, // feature-presence flags
    _ecx: u32,
    _edx: u32,
}
1892
1893impl<R: CpuIdReader> PqosExtendedFeatureInfo<R> {
1894 pub(crate) fn new(read: R) -> Self {
1895 let data = read.cpuid2(EAX_PQOS_EXTENDED_FEATURES, 0);
1896 Self {
1897 read,
1898 _eax: data.eax,
1899 ebx: PqosExtendedFeatureInfoEbx::from_bits_truncate(data.ebx),
1900 _ecx: data.ecx,
1901 _edx: data.edx,
1902 }
1903 }
1904
1905 pub fn has_l3mbe(&self) -> bool {
1907 self.ebx.contains(PqosExtendedFeatureInfoEbx::L3MBE)
1908 }
1909
1910 pub fn has_l3smbe(&self) -> bool {
1912 self.ebx.contains(PqosExtendedFeatureInfoEbx::L3SMBE)
1913 }
1914
1915 pub fn has_bmec(&self) -> bool {
1917 self.ebx.contains(PqosExtendedFeatureInfoEbx::BMEC)
1918 }
1919
1920 pub fn has_l3rr(&self) -> bool {
1923 self.ebx.contains(PqosExtendedFeatureInfoEbx::L3RR)
1924 }
1925
1926 pub fn has_abmc(&self) -> bool {
1928 self.ebx.contains(PqosExtendedFeatureInfoEbx::ABMC)
1929 }
1930
1931 pub fn has_sdciae(&self) -> bool {
1933 self.ebx.contains(PqosExtendedFeatureInfoEbx::SDCIAE)
1934 }
1935
1936 pub fn get_l3_memory_bandwidth_enforcement_info(
1938 &self,
1939 ) -> Option<L3MemoryBandwidthEnforcementInformation> {
1940 if self.has_l3mbe() {
1941 Some(L3MemoryBandwidthEnforcementInformation::new(
1942 self.read.cpuid2(EAX_PQOS_EXTENDED_FEATURES, 1),
1943 ))
1944 } else {
1945 None
1946 }
1947 }
1948
1949 pub fn get_l3_slow_memory_bandwidth_enforcement_info(
1951 &self,
1952 ) -> Option<L3MemoryBandwidthEnforcementInformation> {
1953 if self.has_l3smbe() {
1954 Some(L3MemoryBandwidthEnforcementInformation::new(
1955 self.read.cpuid2(EAX_PQOS_EXTENDED_FEATURES, 2),
1956 ))
1957 } else {
1958 None
1959 }
1960 }
1961
1962 pub fn get_bandwidth_monitoring_event_counters_info(
1964 &self,
1965 ) -> Option<BandwidthMonitoringEventCounters> {
1966 if self.has_bmec() {
1967 Some(BandwidthMonitoringEventCounters::new(
1968 self.read.cpuid2(EAX_PQOS_EXTENDED_FEATURES, 3),
1969 ))
1970 } else {
1971 None
1972 }
1973 }
1974
1975 pub fn get_assignable_bandwidth_monitoring_counters_info(
1977 &self,
1978 ) -> Option<AssignableBandwidthMonitoringCounterInfo> {
1979 if self.has_abmc() {
1980 Some(AssignableBandwidthMonitoringCounterInfo::new(
1981 self.read.cpuid2(EAX_PQOS_EXTENDED_FEATURES, 5),
1982 ))
1983 } else {
1984 None
1985 }
1986 }
1987}
1988
bitflags! {
    /// EBX feature bits of `EAX_PQOS_EXTENDED_FEATURES` sub-leaf 0
    /// (bit 0 is not modeled here).
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct PqosExtendedFeatureInfoEbx: u32 {
        const L3MBE = 1 << 1;
        const L3SMBE = 1 << 2;
        const BMEC = 1 << 3;
        const L3RR = 1 << 4;
        const ABMC = 1 << 5;
        const SDCIAE = 1 << 6;
    }
}
2001
bitflags! {
    /// Flags for PQOS sub-leaf 5 (assignable bandwidth monitoring).
    ///
    /// NOTE(review): not referenced by any accessor visible in this part
    /// of the file — `AssignableBandwidthMonitoringCounterInfo::has_select_cos`
    /// tests the equivalent bit on the raw ECX register instead. Possibly
    /// dead code; confirm before removing.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct PqosExtendedFeatureInfoEbx5: u32 {
        const SELECT_COS = 1 << 0;
    }
}
2009
2010impl<R: CpuIdReader> Debug for PqosExtendedFeatureInfo<R> {
2011 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
2012 f.debug_struct("PqosExtendedFeatureInfo")
2013 .field("has_l3mbe", &self.has_l3mbe())
2014 .field("has_l3smbe", &self.has_l3smbe())
2015 .field("has_bmec", &self.has_bmec())
2016 .field("has_l3rr", &self.has_l3rr())
2017 .field("has_abmc", &self.has_abmc())
2018 .field("has_sdciae", &self.has_sdciae())
2019 .finish()
2020 }
2021}
2022
/// L3 (slow-)memory bandwidth enforcement details, read from PQOS
/// sub-leaf 1 or 2 (see the `get_l3_*` accessors on
/// [`PqosExtendedFeatureInfo`]).
#[derive(PartialEq, Eq, Debug)]
pub struct L3MemoryBandwidthEnforcementInformation {
    eax: u32, // bandwidth length
    _ebx: u32,
    _ecx: u32,
    edx: u32, // maximum COS value
}
2034
2035impl L3MemoryBandwidthEnforcementInformation {
2036 pub(crate) fn new(data: CpuIdResult) -> Self {
2037 Self {
2038 eax: data.eax,
2039 _ebx: data.ebx,
2040 _ecx: data.ecx,
2041 edx: data.edx,
2042 }
2043 }
2044
2045 pub fn bandwidth_length(&self) -> u32 {
2048 self.eax
2049 }
2050
2051 pub fn cos_max(&self) -> u32 {
2053 self.edx
2054 }
2055}
2056
/// Bandwidth monitoring event counter capabilities, read from PQOS
/// sub-leaf 3 (see
/// [`PqosExtendedFeatureInfo::get_bandwidth_monitoring_event_counters_info`]).
#[derive(PartialEq, Eq, Debug)]
pub struct BandwidthMonitoringEventCounters {
    _eax: u32,
    ebx: u32, // number of configurable events
    ecx: BandwidthMonitoringEventCountersEcx, // supported event types
    _edx: u32,
}
2068
2069impl BandwidthMonitoringEventCounters {
2070 pub(crate) fn new(data: CpuIdResult) -> Self {
2071 Self {
2072 _eax: data.eax,
2073 ebx: data.ebx,
2074 ecx: BandwidthMonitoringEventCountersEcx::from_bits_truncate(data.ecx),
2075 _edx: data.edx,
2076 }
2077 }
2078
2079 pub fn number_events(&self) -> u32 {
2081 get_bits(self.ebx, 0, 7)
2082 }
2083
2084 pub fn has_l3_cache_lcl_bw_fill_mon(&self) -> bool {
2086 self.ecx
2087 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_LCL_BW_FILL_MON)
2088 }
2089
2090 pub fn has_l3_cache_rmt_bw_fill_mon(&self) -> bool {
2092 self.ecx
2093 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_RMT_BW_FILL_MON)
2094 }
2095
2096 pub fn has_l3_cache_lcl_bw_nt_wr_mon(&self) -> bool {
2098 self.ecx
2099 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_LCL_BW_NT_WR_MON)
2100 }
2101
2102 pub fn has_l3_cache_rmt_bw_nt_wr_mon(&self) -> bool {
2104 self.ecx
2105 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_RMT_BW_NT_WR_MON)
2106 }
2107
2108 pub fn has_l3_cache_lcl_slow_bw_fill_mon(&self) -> bool {
2110 self.ecx
2111 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_LCL_SLOW_BW_FILL_MON)
2112 }
2113
2114 pub fn has_l3_cache_rmt_slow_bw_fill_mon(&self) -> bool {
2116 self.ecx
2117 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_RMT_SLOW_BW_FILL_MON)
2118 }
2119
2120 pub fn has_l3_cache_vic_mon(&self) -> bool {
2122 self.ecx
2123 .contains(BandwidthMonitoringEventCountersEcx::L3_CACHE_VIC_MON)
2124 }
2125}
2126
bitflags! {
    /// ECX event-type bits of PQOS sub-leaf 3; consumed by the `has_*`
    /// accessors on [`BandwidthMonitoringEventCounters`].
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct BandwidthMonitoringEventCountersEcx: u32 {
        const L3_CACHE_LCL_BW_FILL_MON = 1 << 0;
        const L3_CACHE_RMT_BW_FILL_MON = 1 << 1;
        const L3_CACHE_LCL_BW_NT_WR_MON = 1 << 2;
        const L3_CACHE_RMT_BW_NT_WR_MON = 1 << 3;
        const L3_CACHE_LCL_SLOW_BW_FILL_MON = 1 << 4;
        const L3_CACHE_RMT_SLOW_BW_FILL_MON = 1 << 5;
        const L3_CACHE_VIC_MON = 1 << 6;
    }
}
2140
/// Assignable bandwidth monitoring counter capabilities, read from PQOS
/// sub-leaf 5 (see
/// [`PqosExtendedFeatureInfo::get_assignable_bandwidth_monitoring_counters_info`]).
#[derive(PartialEq, Eq, Debug)]
pub struct AssignableBandwidthMonitoringCounterInfo {
    eax: u32, // counter size / overflow-bit flag
    ebx: u32, // maximum ABMC counter number
    ecx: u32, // select-COS flag (bit 0)
    _edx: u32,
}
2152
2153impl AssignableBandwidthMonitoringCounterInfo {
2154 pub(crate) fn new(data: CpuIdResult) -> Self {
2155 Self {
2156 eax: data.eax,
2157 ebx: data.ebx,
2158 ecx: data.ecx,
2159 _edx: data.edx,
2160 }
2161 }
2162
2163 pub fn counter_size(&self) -> u8 {
2165 get_bits(self.eax, 0, 7) as u8
2166 }
2167
2168 pub fn has_overflow_bit(&self) -> bool {
2170 (self.eax & (1 << 8)) > 0
2171 }
2172
2173 pub fn max_abmc(&self) -> u16 {
2175 get_bits(self.ebx, 0, 15) as u16
2176 }
2177
2178 pub fn has_select_cos(&self) -> bool {
2181 (self.ecx & 1) > 0
2182 }
2183}
2184
/// Extended feature identification 2 leaf: miscellaneous feature flags
/// in EAX plus the microcode patch size in EBX.
#[derive(PartialEq, Eq, Debug)]
pub struct ExtendedFeatureIdentification2 {
    eax: ExtendedFeatureIdentification2Eax, // feature flags (see `has_*`)
    ebx: u32,                               // microcode patch size field
    _ecx: u32,
    _edx: u32,
}
2196
2197impl ExtendedFeatureIdentification2 {
2198 pub(crate) fn new(data: CpuIdResult) -> Self {
2199 Self {
2200 eax: ExtendedFeatureIdentification2Eax::from_bits_truncate(data.eax),
2201 ebx: data.ebx,
2202 _ecx: data.ecx,
2203 _edx: data.edx,
2204 }
2205 }
2206
2207 pub fn has_no_nested_data_bp(&self) -> bool {
2209 self.eax
2210 .contains(ExtendedFeatureIdentification2Eax::NO_NESTED_DATA_BP)
2211 }
2212
2213 pub fn has_lfence_always_serializing(&self) -> bool {
2215 self.eax
2216 .contains(ExtendedFeatureIdentification2Eax::LFENCE_ALWAYS_SERIALIZING)
2217 }
2218
2219 pub fn has_smm_pg_cfg_lock(&self) -> bool {
2221 self.eax
2222 .contains(ExtendedFeatureIdentification2Eax::SMM_PG_CFG_LOCK)
2223 }
2224
2225 pub fn has_null_select_clears_base(&self) -> bool {
2228 self.eax
2229 .contains(ExtendedFeatureIdentification2Eax::NULL_SELECT_CLEARS_BASE)
2230 }
2231
2232 pub fn has_upper_address_ignore(&self) -> bool {
2234 self.eax
2235 .contains(ExtendedFeatureIdentification2Eax::UPPER_ADDRESS_IGNORE)
2236 }
2237
2238 pub fn has_automatic_ibrs(&self) -> bool {
2240 self.eax
2241 .contains(ExtendedFeatureIdentification2Eax::AUTOMATIC_IBRS)
2242 }
2243
2244 pub fn has_no_smm_ctl_msr(&self) -> bool {
2246 self.eax
2247 .contains(ExtendedFeatureIdentification2Eax::NO_SMM_CTL_MSR)
2248 }
2249
2250 pub fn has_prefetch_ctl_msr(&self) -> bool {
2252 self.eax
2253 .contains(ExtendedFeatureIdentification2Eax::PREFETCH_CTL_MSR)
2254 }
2255
2256 pub fn has_cpuid_user_dis(&self) -> bool {
2258 self.eax
2259 .contains(ExtendedFeatureIdentification2Eax::CPUID_USER_DIS)
2260 }
2261
2262 pub fn microcode_patch_size(&self) -> u16 {
2265 get_bits(self.ebx, 0, 11) as u16
2266 }
2267}
2268
bitflags! {
    /// EAX feature bits of the extended-feature-identification-2 leaf;
    /// consumed by the `has_*` accessors on
    /// [`ExtendedFeatureIdentification2`]. Unlisted bits are not modeled.
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ExtendedFeatureIdentification2Eax: u32 {
        const NO_NESTED_DATA_BP = 1 << 0;
        const LFENCE_ALWAYS_SERIALIZING = 1 << 2;
        const SMM_PG_CFG_LOCK = 1 << 3;
        const NULL_SELECT_CLEARS_BASE = 1 << 6;
        const UPPER_ADDRESS_IGNORE = 1 << 7;
        const AUTOMATIC_IBRS = 1 << 8;
        const NO_SMM_CTL_MSR = 1 << 9;
        const PREFETCH_CTL_MSR = 1 << 13;
        const CPUID_USER_DIS = 1 << 17;
    }
}
2284
/// Extended performance monitoring and debug leaf: PerfMonV2/LBR flags
/// in EAX and counter/stack sizes in EBX.
#[derive(PartialEq, Eq, Debug)]
pub struct ExtendedPerformanceMonitoringDebug {
    eax: ExtendedPerformanceMonitoringDebugEax, // feature flags
    ebx: u32,                                   // counter counts / LBR stack size
    _ecx: u32,
    _edx: u32,
}
2296
2297impl ExtendedPerformanceMonitoringDebug {
2298 pub(crate) fn new(data: CpuIdResult) -> Self {
2299 Self {
2300 eax: ExtendedPerformanceMonitoringDebugEax::from_bits_truncate(data.eax),
2301 ebx: data.ebx,
2302 _ecx: data.ecx,
2303 _edx: data.edx,
2304 }
2305 }
2306
2307 pub fn has_perf_mon_v2(&self) -> bool {
2309 self.eax
2310 .contains(ExtendedPerformanceMonitoringDebugEax::PERF_MON_V2)
2311 }
2312
2313 pub fn has_lbr_stack(&self) -> bool {
2315 self.eax
2316 .contains(ExtendedPerformanceMonitoringDebugEax::LBR_STACK)
2317 }
2318
2319 pub fn has_lbr_and_pmc_freeze(&self) -> bool {
2322 self.eax
2323 .contains(ExtendedPerformanceMonitoringDebugEax::LBR_AND_PMC_FREEZE)
2324 }
2325
2326 pub fn num_perf_ctr_core(&self) -> u8 {
2328 get_bits(self.ebx, 0, 3) as u8
2329 }
2330
2331 pub fn num_lbr_stack_size(&self) -> u8 {
2333 get_bits(self.ebx, 4, 9) as u8
2334 }
2335
2336 pub fn num_perf_ctr_nb(&self) -> u8 {
2338 get_bits(self.ebx, 10, 15) as u8
2339 }
2340}
2341
bitflags! {
    /// EAX feature bits of the extended performance-monitoring/debug
    /// leaf; consumed by the `has_*` accessors on
    /// [`ExtendedPerformanceMonitoringDebug`].
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct ExtendedPerformanceMonitoringDebugEax: u32 {
        const PERF_MON_V2 = 1 << 0;
        const LBR_STACK = 1 << 1;
        const LBR_AND_PMC_FREEZE = 1 << 2;
    }
}
2351
/// Multi-key encrypted memory capabilities: MEM-HMK support flag in EAX
/// and the maximum encryption key ID in EBX.
#[derive(PartialEq, Eq, Debug)]
pub struct MultiKeyEncryptedMemoryCapabilities {
    eax: MultiKeyEncryptedMemoryCapabilitiesEax, // feature flags
    ebx: u32,                                    // max. encryption key ID
    _ecx: u32,
    _edx: u32,
}
2363
2364impl MultiKeyEncryptedMemoryCapabilities {
2365 pub(crate) fn new(data: CpuIdResult) -> Self {
2366 Self {
2367 eax: MultiKeyEncryptedMemoryCapabilitiesEax::from_bits_truncate(data.eax),
2368 ebx: data.ebx,
2369 _ecx: data.ecx,
2370 _edx: data.edx,
2371 }
2372 }
2373
2374 pub fn has_mem_hmk(&self) -> bool {
2376 self.eax
2377 .contains(MultiKeyEncryptedMemoryCapabilitiesEax::MEM_HMK)
2378 }
2379
2380 pub fn max_mem_hmk_encr_key_id(&self) -> u16 {
2383 get_bits(self.ebx, 0, 15) as u16
2384 }
2385}
2386
bitflags! {
    /// EAX feature bits of the multi-key encrypted-memory leaf; consumed
    /// by [`MultiKeyEncryptedMemoryCapabilities::has_mem_hmk`].
    #[repr(transparent)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct MultiKeyEncryptedMemoryCapabilitiesEax: u32 {
        const MEM_HMK = 1 << 0;
    }
}
2394
/// Iterator over the extended CPU topology hierarchy
/// (leaf `EAX_EXTENDED_CPU_TOPOLOGY`, one sub-leaf per level); stops at
/// the first level whose type reads back as
/// [`HierarchyLevelType::Reserved`].
#[derive(Clone)]
pub struct ExtendedCpuTopologyIter<R: CpuIdReader> {
    read: R,    // reader used to query each sub-leaf
    level: u32, // next sub-leaf (ECX input value) to query
}
2409
2410impl<R: CpuIdReader> ExtendedCpuTopologyIter<R> {
2411 pub fn new(read: R) -> Self {
2412 Self { read, level: 0 }
2413 }
2414}
2415
/// One level of the extended CPU topology hierarchy, yielded by
/// [`ExtendedCpuTopologyIter`]; raw sub-leaf registers decoded via the
/// accessor methods.
#[derive(PartialEq, Eq, Debug)]
pub struct ExtendedCpuTopologyLevel {
    eax: u32, // mask width + topology capability bits
    ebx: u32, // logical-processor count, ranking, core type
    ecx: u32, // echoed sub-leaf input + level type
    edx: u32, // extended APIC ID
}
2424
2425impl ExtendedCpuTopologyLevel {
2426 pub(crate) fn new(data: CpuIdResult) -> Self {
2427 Self {
2428 eax: data.eax,
2429 ebx: data.ebx,
2430 ecx: data.ecx,
2431 edx: data.edx,
2432 }
2433 }
2434
2435 pub fn mask_width(&self) -> u8 {
2438 get_bits(self.eax, 0, 4) as u8
2439 }
2440
2441 pub fn has_efficiency_ranking_available(&self) -> bool {
2444 self.eax & (1 << 29) > 0
2445 }
2446
2447 pub fn has_heterogeneous_cores(&self) -> bool {
2450 self.eax & (1 << 30) > 0
2451 }
2452
2453 pub fn has_asymmetric_topology(&self) -> bool {
2456 self.eax & (1 << 31) > 0
2457 }
2458
2459 pub fn num_logical_processors(&self) -> u16 {
2461 get_bits(self.ebx, 0, 15) as u16
2462 }
2463
2464 pub fn pwr_efficiency_ranking(&self) -> u8 {
2468 get_bits(self.ebx, 16, 23) as u8
2469 }
2470
2471 pub fn native_mode_id(&self) -> u8 {
2477 get_bits(self.ebx, 24, 27) as u8
2478 }
2479
2480 pub fn core_type(&self) -> u8 {
2486 get_bits(self.ebx, 28, 31) as u8
2487 }
2488
2489 pub fn input_ecx(&self) -> u8 {
2491 get_bits(self.ecx, 0, 7) as u8
2492 }
2493
2494 pub fn level_type(&self) -> HierarchyLevelType {
2496 HierarchyLevelType::from(get_bits(self.ecx, 8, 15) as u8)
2497 }
2498
2499 pub fn extended_apic_id(&self) -> u32 {
2501 self.edx
2502 }
2503}
2504
2505impl<R: CpuIdReader> Iterator for ExtendedCpuTopologyIter<R> {
2506 type Item = ExtendedCpuTopologyLevel;
2507
2508 fn next(&mut self) -> Option<ExtendedCpuTopologyLevel> {
2509 let res = self.read.cpuid2(EAX_EXTENDED_CPU_TOPOLOGY, self.level);
2510 self.level += 1;
2511
2512 let ect = ExtendedCpuTopologyLevel::new(res);
2513 if ect.level_type() == HierarchyLevelType::Reserved {
2514 None
2515 } else {
2516 Some(ect)
2517 }
2518 }
2519}
2520
/// Type of one level in the extended CPU topology hierarchy, decoded
/// from the level-type byte (see [`ExtendedCpuTopologyLevel::level_type`]).
#[repr(u8)]
#[derive(PartialEq, Eq)]
pub enum HierarchyLevelType {
    /// Value 0; also terminates [`ExtendedCpuTopologyIter`].
    Reserved = 0,
    Core = 1,
    Complex = 2,
    Die = 3,
    Socket = 4,
    /// Any value above 4, preserved for forward compatibility.
    Unknown(u8),
}
2531
2532impl From<u8> for HierarchyLevelType {
2533 fn from(value: u8) -> Self {
2534 match value {
2535 0 => Self::Reserved,
2536 1 => Self::Core,
2537 2 => Self::Complex,
2538 3 => Self::Die,
2539 4 => Self::Socket,
2540 x => Self::Unknown(x),
2541 }
2542 }
2543}
2544
2545impl Display for HierarchyLevelType {
2546 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
2547 match self {
2548 HierarchyLevelType::Reserved => write!(f, "Reserved (0)"),
2549 HierarchyLevelType::Core => write!(f, "Core (1)"),
2550 HierarchyLevelType::Complex => write!(f, "Complex (2)"),
2551 HierarchyLevelType::Die => write!(f, "DIE (3)"),
2552 HierarchyLevelType::Socket => write!(f, "Socket (4)"),
2553 HierarchyLevelType::Unknown(x) => write!(f, "Unknown ({x})"),
2554 }
2555 }
2556}
2557
2558impl Debug for HierarchyLevelType {
2559 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
2560 match self {
2561 Self::Reserved => write!(f, "Reserved"),
2562 Self::Core => write!(f, "Core"),
2563 Self::Complex => write!(f, "Complex"),
2564 Self::Die => write!(f, "Die"),
2565 Self::Socket => write!(f, "Socket"),
2566 Self::Unknown(arg0) => f.debug_tuple("Unknown").field(arg0).finish(),
2567 }
2568 }
2569}