1use crate::bindings::*;
7use crate::HV_PAGE_SIZE;
8#[cfg(feature = "with-serde")]
9use serde_derive::{Deserialize, Serialize};
10use std::convert::TryFrom;
11use std::fmt;
12use std::ptr;
13use vmm_sys_util::errno;
14use zerocopy::{FromBytes, IntoBytes};
15
/// General-purpose x86_64 register file (GPRs plus RIP/RFLAGS).
///
/// `repr(C)` with plain `u64` fields so the struct can be serialized
/// byte-for-byte (`IntoBytes`/`FromBytes`) when exchanging state.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct StandardRegisters {
    pub rax: u64,
    pub rbx: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub rsi: u64,
    pub rdi: u64,
    pub rsp: u64,
    pub rbp: u64,
    pub r8: u64,
    pub r9: u64,
    pub r10: u64,
    pub r11: u64,
    pub r12: u64,
    pub r13: u64,
    pub r14: u64,
    pub r15: u64,
    pub rip: u64,
    pub rflags: u64,
}
39
40#[repr(C)]
41#[derive(Debug, Default, Copy, Clone, IntoBytes, FromBytes)]
42#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
43pub struct SegmentRegister {
44 pub base: u64,
46 pub limit: u32,
47 pub selector: u16,
48 pub type_: u8, pub present: u8, pub dpl: u8, pub db: u8, pub s: u8, pub l: u8, pub g: u8, pub avl: u8, pub unusable: __u8,
57 pub padding: __u8,
58}
59
60impl From<hv_x64_segment_register> for SegmentRegister {
61 fn from(hv_reg: hv_x64_segment_register) -> Self {
62 let mut reg = SegmentRegister {
63 base: hv_reg.base,
64 limit: hv_reg.limit,
65 selector: hv_reg.selector,
66 unusable: 0_u8,
67 padding: 0_u8,
68 ..Default::default()
69 };
70
71 unsafe {
74 reg.type_ = hv_reg.__bindgen_anon_1.__bindgen_anon_1.segment_type() as u8;
75 reg.present = hv_reg.__bindgen_anon_1.__bindgen_anon_1.present() as u8;
76 reg.dpl = hv_reg
77 .__bindgen_anon_1
78 .__bindgen_anon_1
79 .descriptor_privilege_level() as u8;
80 reg.db = hv_reg.__bindgen_anon_1.__bindgen_anon_1._default() as u8;
81 reg.s = hv_reg
82 .__bindgen_anon_1
83 .__bindgen_anon_1
84 .non_system_segment() as u8;
85 reg.l = hv_reg.__bindgen_anon_1.__bindgen_anon_1._long() as u8;
86 reg.g = hv_reg.__bindgen_anon_1.__bindgen_anon_1.granularity() as u8;
87 reg.avl = hv_reg.__bindgen_anon_1.__bindgen_anon_1.available() as u8;
88 }
89
90 reg
91 }
92}
93impl From<SegmentRegister> for hv_x64_segment_register {
94 fn from(reg: SegmentRegister) -> Self {
95 let mut hv_reg = hv_x64_segment_register {
96 base: reg.base,
97 limit: reg.limit,
98 selector: reg.selector,
99 ..Default::default()
100 };
101
102 unsafe {
105 hv_reg
106 .__bindgen_anon_1
107 .__bindgen_anon_1
108 .set_segment_type(reg.type_ as u16);
109 hv_reg
110 .__bindgen_anon_1
111 .__bindgen_anon_1
112 .set_present(reg.present as u16);
113 hv_reg
114 .__bindgen_anon_1
115 .__bindgen_anon_1
116 .set_descriptor_privilege_level(reg.dpl as u16);
117 hv_reg
118 .__bindgen_anon_1
119 .__bindgen_anon_1
120 .set__default(reg.db as u16);
121 hv_reg
122 .__bindgen_anon_1
123 .__bindgen_anon_1
124 .set_non_system_segment(reg.s as u16);
125 hv_reg
126 .__bindgen_anon_1
127 .__bindgen_anon_1
128 .set__long(reg.l as u16);
129 hv_reg
130 .__bindgen_anon_1
131 .__bindgen_anon_1
132 .set_granularity(reg.g as u16);
133 hv_reg
134 .__bindgen_anon_1
135 .__bindgen_anon_1
136 .set_available(reg.avl as u16);
137 }
138
139 hv_reg
140 }
141}
142
/// Descriptor-table register (GDTR/IDTR): linear base address and limit.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct TableRegister {
    pub base: u64,
    pub limit: u16,
}
150
151impl From<hv_x64_table_register> for TableRegister {
152 fn from(reg: hv_x64_table_register) -> Self {
153 TableRegister {
154 base: reg.base,
155 limit: reg.limit,
156 }
157 }
158}
159
160impl From<TableRegister> for hv_x64_table_register {
161 fn from(reg: TableRegister) -> Self {
162 hv_x64_table_register {
163 limit: reg.limit,
164 base: reg.base,
165 pad: [0; 3],
166 }
167 }
168}
169
/// System/"special" register state: segment registers, descriptor tables,
/// control registers, EFER, APIC base, and a pending-interrupt bitmap.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct SpecialRegisters {
    pub cs: SegmentRegister,
    pub ds: SegmentRegister,
    pub es: SegmentRegister,
    pub fs: SegmentRegister,
    pub gs: SegmentRegister,
    pub ss: SegmentRegister,
    pub tr: SegmentRegister,
    pub ldt: SegmentRegister,
    pub gdt: TableRegister,
    pub idt: TableRegister,
    pub cr0: u64,
    pub cr2: u64,
    pub cr3: u64,
    pub cr4: u64,
    pub cr8: u64,
    pub efer: u64,
    pub apic_base: u64,
    // 256-bit bitmap, one bit per interrupt vector.
    pub interrupt_bitmap: [u64; 4usize],
}
193
/// x86 debug registers (DR4/DR5 are aliases of DR6/DR7 and not stored).
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct DebugRegisters {
    pub dr0: u64,
    pub dr1: u64,
    pub dr2: u64,
    pub dr3: u64,
    pub dr6: u64,
    pub dr7: u64,
}
205
/// x87 FPU / SSE state snapshot.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct FloatingPointUnit {
    // Eight 16-byte x87 data registers (ST0-ST7/MM0-MM7).
    pub fpr: [[u8; 16usize]; 8usize],
    // FPU control word.
    pub fcw: u16,
    // FPU status word.
    pub fsw: u16,
    // Abridged FPU tag word.
    pub ftwx: u8,
    pub pad1: u8,
    pub last_opcode: u16,
    pub last_ip: u64,
    pub last_dp: u64,
    // Sixteen 128-bit XMM registers.
    pub xmm: [[u8; 16usize]; 16usize],
    pub mxcsr: u32,
    pub pad2: u32,
}
222
// Architectural (IA32_*) MSR indices understood by msr_to_hv_reg_name().
pub const IA32_MSR_TSC: u32 = 0x00000010;
pub const IA32_MSR_EFER: u32 = 0xC0000080;
pub const IA32_MSR_KERNEL_GS_BASE: u32 = 0xC0000102;
pub const IA32_MSR_APIC_BASE: u32 = 0x0000001B;
pub const IA32_MSR_PAT: u32 = 0x0277;
pub const IA32_MSR_SYSENTER_CS: u32 = 0x00000174;
pub const IA32_MSR_SYSENTER_ESP: u32 = 0x00000175;
pub const IA32_MSR_SYSENTER_EIP: u32 = 0x00000176;
pub const IA32_MSR_STAR: u32 = 0xC0000081;
pub const IA32_MSR_LSTAR: u32 = 0xC0000082;
pub const IA32_MSR_CSTAR: u32 = 0xC0000083;
pub const IA32_MSR_SFMASK: u32 = 0xC0000084;

// Variable-range MTRR base/mask pairs.
pub const IA32_MSR_MTRR_CAP: u32 = 0x00FE;
pub const IA32_MSR_MTRR_DEF_TYPE: u32 = 0x02FF;
pub const IA32_MSR_MTRR_PHYSBASE0: u32 = 0x0200;
pub const IA32_MSR_MTRR_PHYSMASK0: u32 = 0x0201;
pub const IA32_MSR_MTRR_PHYSBASE1: u32 = 0x0202;
pub const IA32_MSR_MTRR_PHYSMASK1: u32 = 0x0203;
pub const IA32_MSR_MTRR_PHYSBASE2: u32 = 0x0204;
pub const IA32_MSR_MTRR_PHYSMASK2: u32 = 0x0205;
pub const IA32_MSR_MTRR_PHYSBASE3: u32 = 0x0206;
pub const IA32_MSR_MTRR_PHYSMASK3: u32 = 0x0207;
pub const IA32_MSR_MTRR_PHYSBASE4: u32 = 0x0208;
pub const IA32_MSR_MTRR_PHYSMASK4: u32 = 0x0209;
pub const IA32_MSR_MTRR_PHYSBASE5: u32 = 0x020A;
pub const IA32_MSR_MTRR_PHYSMASK5: u32 = 0x020B;
pub const IA32_MSR_MTRR_PHYSBASE6: u32 = 0x020C;
pub const IA32_MSR_MTRR_PHYSMASK6: u32 = 0x020D;
pub const IA32_MSR_MTRR_PHYSBASE7: u32 = 0x020E;
pub const IA32_MSR_MTRR_PHYSMASK7: u32 = 0x020F;

// Fixed-range MTRRs covering the first megabyte of physical memory.
pub const IA32_MSR_MTRR_FIX64K_00000: u32 = 0x0250;
pub const IA32_MSR_MTRR_FIX16K_80000: u32 = 0x0258;
pub const IA32_MSR_MTRR_FIX16K_A0000: u32 = 0x0259;
pub const IA32_MSR_MTRR_FIX4K_C0000: u32 = 0x0268;
pub const IA32_MSR_MTRR_FIX4K_C8000: u32 = 0x0269;
pub const IA32_MSR_MTRR_FIX4K_D0000: u32 = 0x026A;
pub const IA32_MSR_MTRR_FIX4K_D8000: u32 = 0x026B;
pub const IA32_MSR_MTRR_FIX4K_E0000: u32 = 0x026C;
pub const IA32_MSR_MTRR_FIX4K_E8000: u32 = 0x026D;
pub const IA32_MSR_MTRR_FIX4K_F0000: u32 = 0x026E;
pub const IA32_MSR_MTRR_FIX4K_F8000: u32 = 0x026F;

pub const IA32_MSR_TSC_AUX: u32 = 0xC0000103;
pub const IA32_MSR_BNDCFGS: u32 = 0x00000d90;
pub const IA32_MSR_DEBUG_CTL: u32 = 0x1D9;
pub const IA32_MSR_SPEC_CTRL: u32 = 0x00000048;
pub const IA32_MSR_TSC_ADJUST: u32 = 0x0000003b;

pub const IA32_MSR_MISC_ENABLE: u32 = 0x000001a0;
274
/// Map an architectural or Hyper-V synthetic MSR index to the
/// corresponding `hv_register_name` value used by the register get/set
/// hypercalls.
///
/// # Errors
/// Returns a static error string for MSRs with no known register mapping.
pub fn msr_to_hv_reg_name(msr: u32) -> Result<::std::os::raw::c_uint, &'static str> {
    match msr {
        IA32_MSR_TSC => Ok(hv_register_name_HV_X64_REGISTER_TSC),

        IA32_MSR_EFER => Ok(hv_register_name_HV_X64_REGISTER_EFER),
        IA32_MSR_KERNEL_GS_BASE => Ok(hv_register_name_HV_X64_REGISTER_KERNEL_GS_BASE),
        IA32_MSR_APIC_BASE => Ok(hv_register_name_HV_X64_REGISTER_APIC_BASE),
        IA32_MSR_PAT => Ok(hv_register_name_HV_X64_REGISTER_PAT),
        IA32_MSR_SYSENTER_CS => Ok(hv_register_name_HV_X64_REGISTER_SYSENTER_CS),
        IA32_MSR_SYSENTER_ESP => Ok(hv_register_name_HV_X64_REGISTER_SYSENTER_ESP),
        IA32_MSR_SYSENTER_EIP => Ok(hv_register_name_HV_X64_REGISTER_SYSENTER_EIP),
        IA32_MSR_STAR => Ok(hv_register_name_HV_X64_REGISTER_STAR),
        IA32_MSR_LSTAR => Ok(hv_register_name_HV_X64_REGISTER_LSTAR),
        IA32_MSR_CSTAR => Ok(hv_register_name_HV_X64_REGISTER_CSTAR),
        IA32_MSR_SFMASK => Ok(hv_register_name_HV_X64_REGISTER_SFMASK),

        // Variable-range MTRRs.
        IA32_MSR_MTRR_CAP => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_CAP),
        IA32_MSR_MTRR_DEF_TYPE => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_DEF_TYPE),
        IA32_MSR_MTRR_PHYSBASE0 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE0),
        IA32_MSR_MTRR_PHYSMASK0 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK0),
        IA32_MSR_MTRR_PHYSBASE1 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE1),
        IA32_MSR_MTRR_PHYSMASK1 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK1),
        IA32_MSR_MTRR_PHYSBASE2 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE2),
        IA32_MSR_MTRR_PHYSMASK2 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK2),
        IA32_MSR_MTRR_PHYSBASE3 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE3),
        IA32_MSR_MTRR_PHYSMASK3 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK3),
        IA32_MSR_MTRR_PHYSBASE4 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE4),
        IA32_MSR_MTRR_PHYSMASK4 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK4),
        IA32_MSR_MTRR_PHYSBASE5 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE5),
        IA32_MSR_MTRR_PHYSMASK5 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK5),
        IA32_MSR_MTRR_PHYSBASE6 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE6),
        IA32_MSR_MTRR_PHYSMASK6 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK6),
        IA32_MSR_MTRR_PHYSBASE7 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_BASE7),
        IA32_MSR_MTRR_PHYSMASK7 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_PHYS_MASK7),

        // Fixed-range MTRRs.
        IA32_MSR_MTRR_FIX64K_00000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX64K00000),
        IA32_MSR_MTRR_FIX16K_80000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX16K80000),
        IA32_MSR_MTRR_FIX16K_A0000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX16KA0000),
        IA32_MSR_MTRR_FIX4K_C0000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KC0000),
        IA32_MSR_MTRR_FIX4K_C8000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KC8000),
        IA32_MSR_MTRR_FIX4K_D0000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KD0000),
        IA32_MSR_MTRR_FIX4K_D8000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KD8000),
        IA32_MSR_MTRR_FIX4K_E0000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KE0000),
        IA32_MSR_MTRR_FIX4K_E8000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KE8000),
        IA32_MSR_MTRR_FIX4K_F0000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KF0000),
        IA32_MSR_MTRR_FIX4K_F8000 => Ok(hv_register_name_HV_X64_REGISTER_MSR_MTRR_FIX4KF8000),

        IA32_MSR_TSC_AUX => Ok(hv_register_name_HV_X64_REGISTER_TSC_AUX),
        IA32_MSR_BNDCFGS => Ok(hv_register_name_HV_X64_REGISTER_BNDCFGS),
        IA32_MSR_DEBUG_CTL => Ok(hv_register_name_HV_X64_REGISTER_DEBUG_CTL),
        IA32_MSR_TSC_ADJUST => Ok(hv_register_name_HV_X64_REGISTER_TSC_ADJUST),
        IA32_MSR_SPEC_CTRL => Ok(hv_register_name_HV_X64_REGISTER_SPEC_CTRL),
        // Hyper-V synthetic MSRs (SynIC interrupt sources and control pages).
        HV_X64_MSR_GUEST_OS_ID => Ok(hv_register_name_HV_REGISTER_GUEST_OS_ID),
        HV_X64_MSR_SINT0 => Ok(hv_register_name_HV_REGISTER_SINT0),
        HV_X64_MSR_SINT1 => Ok(hv_register_name_HV_REGISTER_SINT1),
        HV_X64_MSR_SINT2 => Ok(hv_register_name_HV_REGISTER_SINT2),
        HV_X64_MSR_SINT3 => Ok(hv_register_name_HV_REGISTER_SINT3),
        HV_X64_MSR_SINT4 => Ok(hv_register_name_HV_REGISTER_SINT4),
        HV_X64_MSR_SINT5 => Ok(hv_register_name_HV_REGISTER_SINT5),
        HV_X64_MSR_SINT6 => Ok(hv_register_name_HV_REGISTER_SINT6),
        HV_X64_MSR_SINT7 => Ok(hv_register_name_HV_REGISTER_SINT7),
        HV_X64_MSR_SINT8 => Ok(hv_register_name_HV_REGISTER_SINT8),
        HV_X64_MSR_SINT9 => Ok(hv_register_name_HV_REGISTER_SINT9),
        HV_X64_MSR_SINT10 => Ok(hv_register_name_HV_REGISTER_SINT10),
        HV_X64_MSR_SINT11 => Ok(hv_register_name_HV_REGISTER_SINT11),
        HV_X64_MSR_SINT12 => Ok(hv_register_name_HV_REGISTER_SINT12),
        HV_X64_MSR_SINT13 => Ok(hv_register_name_HV_REGISTER_SINT13),
        HV_X64_MSR_SINT14 => Ok(hv_register_name_HV_REGISTER_SINT14),
        HV_X64_MSR_SINT15 => Ok(hv_register_name_HV_REGISTER_SINT15),
        IA32_MSR_MISC_ENABLE => Ok(hv_register_name_HV_X64_REGISTER_MSR_IA32_MISC_ENABLE),
        HV_X64_MSR_SCONTROL => Ok(hv_register_name_HV_REGISTER_SCONTROL),
        HV_X64_MSR_SIEFP => Ok(hv_register_name_HV_REGISTER_SIEFP),
        HV_X64_MSR_SIMP => Ok(hv_register_name_HV_REGISTER_SIMP),
        HV_X64_MSR_REFERENCE_TSC => Ok(hv_register_name_HV_REGISTER_REFERENCE_TSC),
        HV_X64_MSR_EOM => Ok(hv_register_name_HV_REGISTER_EOM),
        _ => Err("Not a supported hv_register_name msr"),
    }
}
353
/// A single MSR index/value pair.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct msr_entry {
    pub index: u32,
    pub reserved: u32,
    pub data: u64,
}
362
/// Variable-length list of MSR entries; `entries` is a C flexible array
/// member holding `nmsrs` elements (allocated out-of-band, hence the
/// serde skips).
#[repr(C)]
#[derive(Debug, Default)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct msrs {
    pub nmsrs: u32,
    #[cfg_attr(feature = "with-serde", serde(skip))]
    pub pad: u32,
    #[cfg_attr(feature = "with-serde", serde(skip))]
    pub entries: __IncompleteArrayField<msr_entry>,
}
373
/// Variable-length list of MSR indices (flexible array member of
/// `nmsrs` elements).
#[repr(C)]
#[derive(Debug, Default)]
pub struct msr_list {
    pub nmsrs: u32,
    pub indices: __IncompleteArrayField<u32>,
}
380
/// Pending vCPU event/interrupt state.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct VcpuEvents {
    pub pending_interruption: u64,
    pub interrupt_state: u64,
    pub internal_activity_state: u64,
    // Two 128-bit pending-event records, stored as raw bytes.
    pub pending_event0: [u8; 16usize],
    pub pending_event1: [u8; 16usize],
}
391
/// Extended control register state (currently only XCR0).
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct Xcrs {
    pub xcr0: u64,
}
398
/// One CPUID leaf: input function/index plus the four output registers.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
pub struct hv_cpuid_entry {
    pub function: __u32,
    pub index: __u32,
    pub flags: __u32,
    pub eax: __u32,
    pub ebx: __u32,
    pub ecx: __u32,
    pub edx: __u32,
    pub padding: [__u32; 3usize],
}
411
/// Variable-length CPUID table; `entries` is a C flexible array member
/// holding `nent` leaves (allocated out-of-band, hence the serde skips).
#[repr(C)]
#[derive(Debug, Default)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct hv_cpuid {
    pub nent: __u32,
    #[cfg_attr(feature = "with-serde", serde(skip))]
    pub padding: __u32,
    #[cfg_attr(feature = "with-serde", serde(skip))]
    pub entries: __IncompleteArrayField<hv_cpuid_entry>,
}
422
// Byte offsets of the local APIC registers within the APIC register page,
// used when (de)serializing LapicState. Previously crammed onto a single
// line; reformatted one constant per line, values unchanged.
pub const LOCAL_APIC_OFFSET_APIC_ID: isize = 0x20;
pub const LOCAL_APIC_OFFSET_VERSION: isize = 0x30;
pub const LOCAL_APIC_OFFSET_TPR: isize = 0x80;
pub const LOCAL_APIC_OFFSET_APR: isize = 0x90;
pub const LOCAL_APIC_OFFSET_PPR: isize = 0xA0;
pub const LOCAL_APIC_OFFSET_EOI: isize = 0xB0;
pub const LOCAL_APIC_OFFSET_REMOTE_READ: isize = 0xC0;
pub const LOCAL_APIC_OFFSET_LDR: isize = 0xD0;
pub const LOCAL_APIC_OFFSET_DFR: isize = 0xE0;
pub const LOCAL_APIC_OFFSET_SPURIOUS: isize = 0xF0;
pub const LOCAL_APIC_OFFSET_ISR: isize = 0x100;
pub const LOCAL_APIC_OFFSET_TMR: isize = 0x180;
pub const LOCAL_APIC_OFFSET_IRR: isize = 0x200;
pub const LOCAL_APIC_OFFSET_ERROR: isize = 0x280;
pub const LOCAL_APIC_OFFSET_ICR_LOW: isize = 0x300;
pub const LOCAL_APIC_OFFSET_ICR_HIGH: isize = 0x310;
pub const LOCAL_APIC_OFFSET_TIMER_LVT: isize = 0x320;
pub const LOCAL_APIC_OFFSET_THERMAL_LVT: isize = 0x330;
pub const LOCAL_APIC_OFFSET_PERFMON_LVT: isize = 0x340;
pub const LOCAL_APIC_OFFSET_LINT0_LVT: isize = 0x350;
pub const LOCAL_APIC_OFFSET_LINT1_LVT: isize = 0x360;
pub const LOCAL_APIC_OFFSET_ERROR_LVT: isize = 0x370;
pub const LOCAL_APIC_OFFSET_INITIAL_COUNT: isize = 0x380;
pub const LOCAL_APIC_OFFSET_CURRENT_COUNT: isize = 0x390;
pub const LOCAL_APIC_OFFSET_DIVIDER: isize = 0x3e0;
pub const LOCAL_X2APIC_OFFSET_SELF_IPI: isize = 0x3f0;

/// An owned, aligned heap allocation used to exchange raw state blobs
/// with the hypervisor. Freed via `dealloc()`/`Drop` (see `impl Buffer`).
pub struct Buffer {
    pub layout: std::alloc::Layout,
    pub buf: *mut u8,
}
454
455impl Buffer {
456 pub fn new(size: usize, align: usize) -> Result<Buffer, errno::Error> {
457 let layout = std::alloc::Layout::from_size_align(size, align).unwrap();
458 let buf = unsafe { std::alloc::alloc(layout) };
460 if buf.is_null() {
461 return Err(errno::Error::new(libc::ENOMEM));
462 }
463
464 let buf = Buffer { layout, buf };
465
466 Ok(buf)
467 }
468
469 pub fn dealloc(&mut self) {
470 unsafe {
472 std::alloc::dealloc(self.buf, self.layout);
473 }
474 }
475
476 pub fn size(&self) -> usize {
477 self.layout.size()
478 }
479
480 pub fn zero_out_buf(&mut self) {
481 unsafe {
483 ::std::ptr::write_bytes(self.buf, 0u8, self.size());
484 }
485 }
486}
487
impl Drop for Buffer {
    // Release the allocation when the Buffer goes out of scope.
    fn drop(&mut self) {
        self.dealloc();
    }
}
493
/// Raw local APIC register page (1 KiB of memory-mapped APIC registers),
/// addressed via the LOCAL_APIC_OFFSET_* constants above.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, FromBytes)]
pub struct LapicState {
    pub regs: [::std::os::raw::c_char; 1024usize],
}
500
501impl Default for LapicState {
502 fn default() -> Self {
503 unsafe { ::std::mem::zeroed() }
504 }
505}
506
/// Raw XSAVE area, stored as one page of opaque bytes.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, FromBytes)]
pub struct XSave {
    pub buffer: [u8; 4096usize],
}
513
514impl Default for XSave {
515 fn default() -> Self {
516 unsafe { ::std::mem::zeroed() }
517 }
518}
519
520impl TryFrom<Buffer> for XSave {
521 type Error = errno::Error;
522 fn try_from(buf: Buffer) -> Result<Self, Self::Error> {
523 let mut ret = XSave {
524 ..Default::default()
525 };
526 let ret_size = std::mem::size_of_val(&ret.buffer);
527 if ret_size < buf.size() {
528 return Err(errno::Error::new(libc::EINVAL));
529 }
530 unsafe { ptr::copy(buf.buf, ret.buffer.as_mut_ptr(), buf.size()) };
532 Ok(ret)
533 }
534}
535
536impl TryFrom<&XSave> for Buffer {
537 type Error = errno::Error;
538 fn try_from(reg: &XSave) -> Result<Self, Self::Error> {
539 let reg_size = std::mem::size_of_val(®.buffer);
540 let num_pages = (reg_size + HV_PAGE_SIZE - 1) >> HV_HYP_PAGE_SHIFT;
541 let buffer = Buffer::new(num_pages * HV_PAGE_SIZE, HV_PAGE_SIZE)?;
542 unsafe { ptr::copy(reg.buffer.as_ptr(), buffer.buf, reg_size) };
544 Ok(buffer)
545 }
546}
547
impl TryFrom<Buffer> for LapicState {
    type Error = errno::Error;
    /// Expand the hypervisor's packed `hv_local_interrupt_controller_state`
    /// (at the start of `buf`) into the memory-mapped APIC register page
    /// layout used by `LapicState`.
    ///
    /// # Errors
    /// Returns `EINVAL` when `buf` is too small to contain the hypervisor
    /// state struct.
    fn try_from(buf: Buffer) -> Result<Self, Self::Error> {
        let mut ret: LapicState = LapicState::default();
        let state = ret.regs.as_mut_ptr();
        if buf.size() < std::mem::size_of::<hv_local_interrupt_controller_state>() {
            return Err(errno::Error::new(libc::EINVAL));
        }
        // SAFETY: size checked above; each write below targets a 4-byte
        // register slot inside the 1024-byte `regs` array (offsets are in
        // bytes since `state` is a *mut c_char).
        unsafe {
            let hv_state = &*(buf.buf as *const hv_local_interrupt_controller_state);
            *(state.offset(LOCAL_APIC_OFFSET_APIC_ID) as *mut u32) = hv_state.apic_id;
            *(state.offset(LOCAL_APIC_OFFSET_VERSION) as *mut u32) = hv_state.apic_version;
            *(state.offset(LOCAL_APIC_OFFSET_REMOTE_READ) as *mut u32) = hv_state.apic_remote_read;
            *(state.offset(LOCAL_APIC_OFFSET_LDR) as *mut u32) = hv_state.apic_ldr;
            *(state.offset(LOCAL_APIC_OFFSET_DFR) as *mut u32) = hv_state.apic_dfr;
            *(state.offset(LOCAL_APIC_OFFSET_SPURIOUS) as *mut u32) = hv_state.apic_spurious;
            *(state.offset(LOCAL_APIC_OFFSET_ERROR) as *mut u32) = hv_state.apic_esr;
            *(state.offset(LOCAL_APIC_OFFSET_ICR_LOW) as *mut u32) = hv_state.apic_icr_low;
            *(state.offset(LOCAL_APIC_OFFSET_ICR_HIGH) as *mut u32) = hv_state.apic_icr_high;
            *(state.offset(LOCAL_APIC_OFFSET_TIMER_LVT) as *mut u32) = hv_state.apic_lvt_timer;
            *(state.offset(LOCAL_APIC_OFFSET_THERMAL_LVT) as *mut u32) = hv_state.apic_lvt_thermal;
            *(state.offset(LOCAL_APIC_OFFSET_PERFMON_LVT) as *mut u32) = hv_state.apic_lvt_perfmon;
            *(state.offset(LOCAL_APIC_OFFSET_LINT0_LVT) as *mut u32) = hv_state.apic_lvt_lint0;
            *(state.offset(LOCAL_APIC_OFFSET_LINT1_LVT) as *mut u32) = hv_state.apic_lvt_lint1;
            *(state.offset(LOCAL_APIC_OFFSET_ERROR_LVT) as *mut u32) = hv_state.apic_lvt_error;
            *(state.offset(LOCAL_APIC_OFFSET_INITIAL_COUNT) as *mut u32) =
                hv_state.apic_initial_count;
            *(state.offset(LOCAL_APIC_OFFSET_CURRENT_COUNT) as *mut u32) =
                hv_state.apic_counter_value;
            *(state.offset(LOCAL_APIC_OFFSET_DIVIDER) as *mut u32) =
                hv_state.apic_divide_configuration;

            // ISR/TMR/IRR are banks of eight 32-bit registers spaced 16
            // bytes apart in the APIC page.
            for i in 0..8 {
                *(state.offset(LOCAL_APIC_OFFSET_ISR + i * 16) as *mut u32) =
                    hv_state.apic_isr[i as usize];
                *(state.offset(LOCAL_APIC_OFFSET_TMR + i * 16) as *mut u32) =
                    hv_state.apic_tmr[i as usize];
                *(state.offset(LOCAL_APIC_OFFSET_IRR + i * 16) as *mut u32) =
                    hv_state.apic_irr[i as usize];
            }

            // Derive the highest in-service vector from the ISR banks:
            // scan from the top bank down and take the highest set bit.
            // NOTE(review): `31 - leading_zeros` yields the highest set
            // *bit index* of the bank, added to the bank's base vector.
            let mut isrv: u32 = 0;
            for i in (0..8).rev() {
                let val: u32 = hv_state.apic_isr[i as usize];
                if val != 0 {
                    isrv = 31 - val.leading_zeros(); isrv += i * 4 * 8; break;
                }
            }

            // Stash the derived value in the PPR slot.
            *(state.offset(LOCAL_APIC_OFFSET_PPR) as *mut u32) = isrv;
        }
        Ok(ret)
    }
}
608
impl TryFrom<&LapicState> for Buffer {
    type Error = errno::Error;
    /// Pack the memory-mapped APIC register page back into the hypervisor's
    /// `hv_local_interrupt_controller_state`, written at the start of a
    /// freshly allocated page-aligned Buffer.
    ///
    /// # Errors
    /// Propagates the allocation failure from `Buffer::new`.
    fn try_from(reg: &LapicState) -> Result<Self, Self::Error> {
        let hv_state_size = std::mem::size_of::<hv_local_interrupt_controller_state>();
        // NOTE(review): mixes HV_PAGE_SIZE with HV_HYP_PAGE_SHIFT; these
        // only agree while HV_PAGE_SIZE == 1 << HV_HYP_PAGE_SHIFT — confirm.
        let num_pages = (hv_state_size + HV_PAGE_SIZE - 1) >> HV_HYP_PAGE_SHIFT;
        let buffer = Buffer::new(num_pages * HV_PAGE_SIZE, HV_PAGE_SIZE)?;
        // SAFETY: the buffer is at least one page, which holds the hv state
        // struct; reads stay within the 1024-byte `regs` array.
        unsafe {
            let state = reg.regs.as_ptr();
            let hv_state = &mut *(buffer.buf as *mut hv_local_interrupt_controller_state);
            *hv_state = hv_local_interrupt_controller_state {
                apic_id: *(state.offset(LOCAL_APIC_OFFSET_APIC_ID) as *mut u32),
                apic_version: *(state.offset(LOCAL_APIC_OFFSET_VERSION) as *mut u32),
                apic_remote_read: *(state.offset(LOCAL_APIC_OFFSET_REMOTE_READ) as *mut u32),
                apic_ldr: *(state.offset(LOCAL_APIC_OFFSET_LDR) as *mut u32),
                apic_dfr: *(state.offset(LOCAL_APIC_OFFSET_DFR) as *mut u32),
                apic_spurious: *(state.offset(LOCAL_APIC_OFFSET_SPURIOUS) as *mut u32),
                apic_esr: *(state.offset(LOCAL_APIC_OFFSET_ERROR) as *mut u32),
                apic_icr_low: *(state.offset(LOCAL_APIC_OFFSET_ICR_LOW) as *mut u32),
                apic_icr_high: *(state.offset(LOCAL_APIC_OFFSET_ICR_HIGH) as *mut u32),
                apic_lvt_timer: *(state.offset(LOCAL_APIC_OFFSET_TIMER_LVT) as *mut u32),
                apic_lvt_thermal: *(state.offset(LOCAL_APIC_OFFSET_THERMAL_LVT) as *mut u32),
                apic_lvt_perfmon: *(state.offset(LOCAL_APIC_OFFSET_PERFMON_LVT) as *mut u32),
                apic_lvt_lint0: *(state.offset(LOCAL_APIC_OFFSET_LINT0_LVT) as *mut u32),
                apic_lvt_lint1: *(state.offset(LOCAL_APIC_OFFSET_LINT1_LVT) as *mut u32),
                apic_lvt_error: *(state.offset(LOCAL_APIC_OFFSET_ERROR_LVT) as *mut u32),
                apic_initial_count: *(state.offset(LOCAL_APIC_OFFSET_INITIAL_COUNT) as *mut u32),
                apic_counter_value: *(state.offset(LOCAL_APIC_OFFSET_CURRENT_COUNT) as *mut u32),
                apic_divide_configuration: *(state.offset(LOCAL_APIC_OFFSET_DIVIDER) as *mut u32),
                apic_error_status: 0,
                apic_lvt_cmci: 0,
                apic_isr: [0; 8],
                apic_tmr: [0; 8],
                apic_irr: [0; 8],
            };

            // ISR/TMR/IRR are banks of eight 32-bit registers spaced 16
            // bytes apart in the APIC page.
            for i in 0..8 {
                hv_state.apic_isr[i as usize] =
                    *(state.offset(LOCAL_APIC_OFFSET_ISR + i * 16) as *mut u32);
                hv_state.apic_tmr[i as usize] =
                    *(state.offset(LOCAL_APIC_OFFSET_TMR + i * 16) as *mut u32);
                hv_state.apic_irr[i as usize] =
                    *(state.offset(LOCAL_APIC_OFFSET_IRR + i * 16) as *mut u32);
            }
        }

        Ok(buffer)
    }
}
659
660impl fmt::Display for XSave {
662 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
663 write!(
664 f,
665 "buffer: {:?}\n data: {:02X?}",
666 self.buffer.as_ptr(),
667 self.buffer,
668 )
669 }
670}
671
/// vCPU suspend state registers.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct SuspendRegisters {
    pub explicit_register: u64,
    pub intercept_register: u64,
}
679
/// Miscellaneous register state (currently only the hypercall register).
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, IntoBytes, FromBytes)]
#[cfg_attr(feature = "with-serde", derive(Deserialize, Serialize))]
pub struct MiscRegs {
    pub hypercall: u64,
}
686
687const fn initialize_comp_sizes() -> [usize; MSHV_VP_STATE_COUNT as usize] {
688 let mut vp_state_comp_size = [0; MSHV_VP_STATE_COUNT as usize];
689
690 vp_state_comp_size[MSHV_VP_STATE_LAPIC as usize] = std::mem::size_of::<LapicState>();
691 vp_state_comp_size[MSHV_VP_STATE_XSAVE as usize] = std::mem::size_of::<XSave>();
692 vp_state_comp_size[MSHV_VP_STATE_SIMP as usize] = HV_PAGE_SIZE; vp_state_comp_size[MSHV_VP_STATE_SIEFP as usize] = HV_PAGE_SIZE; vp_state_comp_size[MSHV_VP_STATE_SYNTHETIC_TIMERS as usize] =
699 std::mem::size_of::<hv_synthetic_timers_state>();
700
701 vp_state_comp_size
702}
703
// Per-component sizes, indexed by MSHV_VP_STATE_* discriminant.
const VP_STATE_COMP_SIZES: [usize; MSHV_VP_STATE_COUNT as usize] = initialize_comp_sizes();

// Total size of all components laid out back-to-back, in discriminant order.
pub const VP_STATE_COMPONENTS_BUFFER_SIZE: usize = VP_STATE_COMP_SIZES
    [MSHV_VP_STATE_LAPIC as usize]
    + VP_STATE_COMP_SIZES[MSHV_VP_STATE_XSAVE as usize]
    + VP_STATE_COMP_SIZES[MSHV_VP_STATE_SIMP as usize]
    + VP_STATE_COMP_SIZES[MSHV_VP_STATE_SIEFP as usize]
    + VP_STATE_COMP_SIZES[MSHV_VP_STATE_SYNTHETIC_TIMERS as usize];
718
719#[inline(always)]
720fn get_vp_state_comp_start_offset(index: usize) -> usize {
721 VP_STATE_COMP_SIZES[0..index].iter().copied().sum()
722}
723
/// All vCPU state components packed back-to-back in MSHV_VP_STATE_*
/// discriminant order (offsets via get_vp_state_comp_start_offset).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, FromBytes)]
pub struct AllVpStateComponents {
    pub buffer: [u8; VP_STATE_COMPONENTS_BUFFER_SIZE],
}
734
735impl Default for AllVpStateComponents {
736 fn default() -> Self {
737 unsafe { ::std::mem::zeroed() }
738 }
739}
740
741impl AllVpStateComponents {
742 pub fn copy_to_or_from_buffer(&mut self, index: usize, buffer: &mut Buffer, to_buffer: bool) {
743 let len: usize = VP_STATE_COMP_SIZES[index];
744
745 if len > buffer.size() {
746 panic!("Invalid buffer length for state components");
747 }
748
749 let start = get_vp_state_comp_start_offset(index);
750 let end = start + len;
751
752 if to_buffer {
753 unsafe { ptr::copy(self.buffer[start..end].as_ptr(), buffer.buf, len) };
755 } else {
756 unsafe { ptr::copy(buffer.buf, self.buffer[start..end].as_mut_ptr(), len) };
758 }
759 }
760}
761
/// Write `$value` into general-purpose-register field `$name` through the
/// raw pointer `$this` (a bindgen-generated nested anonymous union/struct).
/// Caller must guarantee `$this` is valid and the GP-register union variant
/// is the active one.
#[macro_export]
macro_rules! set_gp_regs_field_ptr {
    ($this: ident, $name: ident, $value: expr) => {
        #[allow(clippy::macro_metavars_in_unsafe)]
        unsafe {
            (*$this)
                .__bindgen_anon_1
                .__bindgen_anon_1
                .__bindgen_anon_1
                .__bindgen_anon_1
                .$name = $value;
        }
    };
}
777
/// Read general-purpose-register field `$name` through the raw pointer
/// `$this`. Expands to a place expression; the caller must wrap the use in
/// `unsafe` and guarantee `$this` is valid.
#[macro_export]
macro_rules! get_gp_regs_field_ptr {
    ($this: ident, $name: ident) => {
        (*$this)
            .__bindgen_anon_1
            .__bindgen_anon_1
            .__bindgen_anon_1
            .__bindgen_anon_1
            .$name
    };
}
789
#[cfg(test)]
mod tests {
    use super::*;
    use std::slice::from_raw_parts_mut;

    // Fill every component with a marker byte, copy each component out to a
    // scratch Buffer, and verify the copied bytes match the source region.
    #[test]
    fn test_all_vp_state_components_copy_to_buffer() {
        let mut states: AllVpStateComponents = AllVpStateComponents::default();
        let mut buffer = Buffer::new(HV_PAGE_SIZE, HV_PAGE_SIZE).unwrap();

        for i in 0..VP_STATE_COMPONENTS_BUFFER_SIZE {
            states.buffer[i] = 0xB9;
        }

        for i in 0..MSHV_VP_STATE_COUNT {
            let len = VP_STATE_COMP_SIZES[i as usize];
            let start = get_vp_state_comp_start_offset(i as usize);
            let end = start + len;
            states.copy_to_or_from_buffer(i as usize, &mut buffer, true);
            let buf_arr = unsafe { std::slice::from_raw_parts(buffer.buf, len) };
            assert!(states.buffer[start..end]
                .iter()
                .zip(buf_arr)
                .all(|(a, b)| a == b));
        }
    }

    // Fill a source Buffer with a marker byte, copy it into each component,
    // and verify every component region now matches the source bytes.
    #[test]
    fn test_all_vp_state_components_copy_from_buffer() {
        let mut states: AllVpStateComponents = AllVpStateComponents::default();
        let buffer = Buffer::new(HV_PAGE_SIZE, HV_PAGE_SIZE).unwrap();
        let mut copy_buffer = Buffer::new(HV_PAGE_SIZE, HV_PAGE_SIZE).unwrap();

        let mut_buf = unsafe { from_raw_parts_mut(buffer.buf, buffer.layout.size()) };
        for itm in mut_buf.iter_mut().take(HV_PAGE_SIZE) {
            *itm = 0xA5;
        }

        unsafe { ptr::copy(mut_buf.as_mut_ptr(), copy_buffer.buf, HV_PAGE_SIZE) };

        for i in 0..MSHV_VP_STATE_COUNT {
            let len = VP_STATE_COMP_SIZES[i as usize];
            let start = get_vp_state_comp_start_offset(i as usize);
            let end = start + len;

            states.copy_to_or_from_buffer(i as usize, &mut copy_buffer, false);
            let buf_arr = &mut_buf[0..len];
            assert!(states.buffer[start..end]
                .iter()
                .zip(buf_arr)
                .all(|(a, b)| a == b));
        }
    }
}