1#![expect(missing_docs)]
7#![forbid(unsafe_code)]
8#![no_std]
9
10pub mod vbs;
11
12use bitfield_struct::bitfield;
13use core::fmt::Debug;
14use core::mem::size_of;
15use open_enum::open_enum;
16use static_assertions::const_assert;
17use zerocopy::FromBytes;
18use zerocopy::FromZeros;
19use zerocopy::Immutable;
20use zerocopy::IntoBytes;
21use zerocopy::KnownLayout;
22
/// Size of a hypervisor page, in bytes.
pub const HV_PAGE_SIZE: u64 = 4096;
/// [`HV_PAGE_SIZE`] as a `usize`, for buffer sizing and indexing.
pub const HV_PAGE_SIZE_USIZE: usize = 4096;
/// log2 of [`HV_PAGE_SIZE`].
pub const HV_PAGE_SHIFT: u64 = 12;

/// Partition ID alias referring to the caller's own partition.
pub const HV_PARTITION_ID_SELF: u64 = u64::MAX;
/// VP index alias referring to the caller's own virtual processor.
pub const HV_VP_INDEX_SELF: u32 = 0xfffffffe;

// CPUID leaves for the hypervisor guest interface (0x4000_0000 range).
pub const HV_CPUID_FUNCTION_VERSION_AND_FEATURES: u32 = 0x00000001;
pub const HV_CPUID_FUNCTION_HV_VENDOR_AND_MAX_FUNCTION: u32 = 0x40000000;
pub const HV_CPUID_FUNCTION_HV_INTERFACE: u32 = 0x40000001;
pub const HV_CPUID_FUNCTION_MS_HV_VERSION: u32 = 0x40000002;
pub const HV_CPUID_FUNCTION_MS_HV_FEATURES: u32 = 0x40000003;
pub const HV_CPUID_FUNCTION_MS_HV_ENLIGHTENMENT_INFORMATION: u32 = 0x40000004;
pub const HV_CPUID_FUNCTION_MS_HV_IMPLEMENTATION_LIMITS: u32 = 0x40000005;
pub const HV_CPUID_FUNCTION_MS_HV_HARDWARE_FEATURES: u32 = 0x40000006;
pub const HV_CPUID_FUNCTION_MS_HV_ISOLATION_CONFIGURATION: u32 = 0x4000000C;

// CPUID leaves exposed by the virtualization stack (above the hypervisor's).
pub const VIRTUALIZATION_STACK_CPUID_VENDOR: u32 = 0x40000080;
pub const VIRTUALIZATION_STACK_CPUID_INTERFACE: u32 = 0x40000081;
pub const VIRTUALIZATION_STACK_CPUID_PROPERTIES: u32 = 0x40000082;

// EAX flag bits reported by VIRTUALIZATION_STACK_CPUID_PROPERTIES.
pub const VS1_PARTITION_PROPERTIES_EAX_IS_PORTABLE: u32 = 0x000000001;
pub const VS1_PARTITION_PROPERTIES_EAX_DEBUG_DEVICE_PRESENT: u32 = 0x000000002;
pub const VS1_PARTITION_PROPERTIES_EAX_EXTENDED_IOAPIC_RTE: u32 = 0x000000004;
pub const VS1_PARTITION_PROPERTIES_EAX_CONFIDENTIAL_VMBUS_AVAILABLE: u32 = 0x000000008;

/// Vendor UID identifying the Microsoft hypervisor.
pub const VENDOR_HYP_UID_MS_HYPERVISOR: [u32; 4] = [0x4d32ba58, 0xcd244764, 0x8eef6c75, 0x16597024];
58
/// Partition privilege flags, reported in the hypervisor features CPUID leaf.
#[bitfield(u64)]
pub struct HvPartitionPrivilege {
    // Low 32 bits: access to virtual MSRs.
    pub access_vp_runtime_msr: bool,
    pub access_partition_reference_counter: bool,
    pub access_synic_msrs: bool,
    pub access_synthetic_timer_msrs: bool,
    pub access_apic_msrs: bool,
    pub access_hypercall_msrs: bool,
    pub access_vp_index: bool,
    pub access_reset_msr: bool,
    pub access_stats_msr: bool,
    pub access_partition_reference_tsc: bool,
    pub access_guest_idle_msr: bool,
    pub access_frequency_msrs: bool,
    pub access_debug_msrs: bool,
    pub access_reenlightenment_ctrls: bool,
    pub access_root_scheduler_msr: bool,
    pub access_tsc_invariant_controls: bool,
    _reserved1: u16,

    // High 32 bits: access to hypercalls and partition operations.
    pub create_partitions: bool,
    pub access_partition_id: bool,
    pub access_memory_pool: bool,
    pub adjust_message_buffers: bool,
    pub post_messages: bool,
    pub signal_events: bool,
    pub create_port: bool,
    pub connect_port: bool,
    pub access_stats: bool,
    #[bits(2)]
    _reserved2: u64,
    pub debugging: bool,
    pub cpu_management: bool,
    pub configure_profiler: bool,
    pub access_vp_exit_tracing: bool,
    pub enable_extended_gva_ranges_flush_va_list: bool,
    pub access_vsm: bool,
    pub access_vp_registers: bool,
    _unused_bit: bool,
    pub fast_hypercall_output: bool,
    pub enable_extended_hypercalls: bool,
    pub start_virtual_processor: bool,
    pub isolation: bool,
    #[bits(9)]
    _reserved3: u64,
}
107
open_enum! {
    /// The hardware isolation technology, if any, backing a partition.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvPartitionIsolationType: u8 {
        NONE = 0,
        VBS = 1,
        SNP = 2,
        TDX = 3,
    }
}
117
118#[bitfield(u128)]
119#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
120pub struct HvFeatures {
121 #[bits(64)]
122 pub privileges: HvPartitionPrivilege,
123
124 #[bits(4)]
125 pub max_supported_cstate: u32,
126 pub hpet_needed_for_c3_power_state_deprecated: bool,
127 pub invariant_mperf_available: bool,
128 pub supervisor_shadow_stack_available: bool,
129 pub arch_pmu_available: bool,
130 pub exception_trap_intercept_available: bool,
131 #[bits(23)]
132 reserved: u32,
133
134 pub mwait_available_deprecated: bool,
135 pub guest_debugging_available: bool,
136 pub performance_monitors_available: bool,
137 pub cpu_dynamic_partitioning_available: bool,
138 pub xmm_registers_for_fast_hypercall_available: bool,
139 pub guest_idle_available: bool,
140 pub hypervisor_sleep_state_support_available: bool,
141 pub numa_distance_query_available: bool,
142 pub frequency_regs_available: bool,
143 pub synthetic_machine_check_available: bool,
144 pub guest_crash_regs_available: bool,
145 pub debug_regs_available: bool,
146 pub npiep1_available: bool,
147 pub disable_hypervisor_available: bool,
148 pub extended_gva_ranges_for_flush_virtual_address_list_available: bool,
149 pub fast_hypercall_output_available: bool,
150 pub svm_features_available: bool,
151 pub sint_polling_mode_available: bool,
152 pub hypercall_msr_lock_available: bool,
153 pub direct_synthetic_timers: bool,
154 pub register_pat_available: bool,
155 pub register_bndcfgs_available: bool,
156 pub watchdog_timer_available: bool,
157 pub synthetic_time_unhalted_timer_available: bool,
158 pub device_domains_available: bool, pub s1_device_domains_available: bool, pub lbr_available: bool,
161 pub ipt_available: bool,
162 pub cross_vtl_flush_available: bool,
163 pub idle_spec_ctrl_available: bool,
164 pub translate_gva_flags_available: bool,
165 pub apic_eoi_intercept_available: bool,
166}
167
impl HvFeatures {
    /// Reconstructs the features from the raw CPUID register output.
    pub fn from_cpuid(cpuid: [u32; 4]) -> Self {
        zerocopy::transmute!(cpuid)
    }

    /// Converts the features into raw CPUID register output.
    pub fn into_cpuid(self) -> [u32; 4] {
        zerocopy::transmute!(self)
    }
}
177
/// Enlightenment recommendations, as reported in CPUID leaf
/// [`HV_CPUID_FUNCTION_MS_HV_ENLIGHTENMENT_INFORMATION`].
#[bitfield(u128)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvEnlightenmentInformation {
    pub use_hypercall_for_address_space_switch: bool,
    pub use_hypercall_for_local_flush: bool,
    pub use_hypercall_for_remote_flush_and_local_flush_entire: bool,
    pub use_apic_msrs: bool,
    pub use_hv_register_for_reset: bool,
    pub use_relaxed_timing: bool,
    pub use_dma_remapping_deprecated: bool,
    pub use_interrupt_remapping_deprecated: bool,
    pub use_x2_apic_msrs: bool,
    pub deprecate_auto_eoi: bool,
    pub use_synthetic_cluster_ipi: bool,
    pub use_ex_processor_masks: bool,
    pub nested: bool,
    pub use_int_for_mbec_system_calls: bool,
    pub use_vmcs_enlightenments: bool,
    pub use_synced_timeline: bool,
    pub core_scheduler_requested: bool,
    pub use_direct_local_flush_entire: bool,
    pub no_non_architectural_core_sharing: bool,
    pub use_x2_apic: bool,
    pub restore_time_on_resume: bool,
    pub use_hypercall_for_mmio_access: bool,
    pub use_gpa_pinning_hypercall: bool,
    pub wake_vps: bool,
    _reserved: u8,
    /// Recommended number of spinlock retries before notifying the hypervisor.
    pub long_spin_wait_count: u32,
    #[bits(7)]
    pub implemented_physical_address_bits: u32,
    #[bits(25)]
    _reserved1: u32,
    _reserved2: u32,
}
213
impl HvEnlightenmentInformation {
    /// Reconstructs the enlightenment information from raw CPUID output.
    pub fn from_cpuid(cpuid: [u32; 4]) -> Self {
        zerocopy::transmute!(cpuid)
    }

    /// Converts the enlightenment information into raw CPUID output.
    pub fn into_cpuid(self) -> [u32; 4] {
        zerocopy::transmute!(self)
    }
}
223
/// Hardware features in use, as reported in CPUID leaf
/// [`HV_CPUID_FUNCTION_MS_HV_HARDWARE_FEATURES`].
#[bitfield(u128)]
pub struct HvHardwareFeatures {
    pub apic_overlay_assist_in_use: bool,
    pub msr_bitmaps_in_use: bool,
    pub architectural_performance_counters_in_use: bool,
    pub second_level_address_translation_in_use: bool,
    pub dma_remapping_in_use: bool,
    pub interrupt_remapping_in_use: bool,
    pub memory_patrol_scrubber_present: bool,
    pub dma_protection_in_use: bool,
    pub hpet_requested: bool,
    pub synthetic_timers_volatile: bool,
    #[bits(4)]
    pub hypervisor_level: u32,
    pub physical_destination_mode_required: bool,
    pub use_vmfunc_for_alias_map_switch: bool,
    pub hv_register_for_memory_zeroing_supported: bool,
    pub unrestricted_guest_supported: bool,
    pub rdt_afeatures_supported: bool,
    pub rdt_mfeatures_supported: bool,
    pub child_perfmon_pmu_supported: bool,
    pub child_perfmon_lbr_supported: bool,
    pub child_perfmon_ipt_supported: bool,
    pub apic_emulation_supported: bool,
    pub child_x2_apic_recommended: bool,
    pub hardware_watchdog_reserved: bool,
    pub device_access_tracking_supported: bool,
    pub hardware_gpa_access_tracking_supported: bool,
    #[bits(4)]
    _reserved: u32,

    pub device_domain_input_width: u8,
    #[bits(24)]
    _reserved1: u32,
    _reserved2: u32,
    _reserved3: u32,
}
261
/// Isolation configuration, as reported in CPUID leaf
/// [`HV_CPUID_FUNCTION_MS_HV_ISOLATION_CONFIGURATION`].
#[bitfield(u128)]
pub struct HvIsolationConfiguration {
    pub paravisor_present: bool,
    #[bits(31)]
    pub _reserved0: u32,

    /// The isolation technology; values match [`HvPartitionIsolationType`].
    #[bits(4)]
    pub isolation_type: u8,
    _reserved11: bool,
    pub shared_gpa_boundary_active: bool,
    /// log2 of the shared GPA boundary, valid when the boundary is active.
    #[bits(6)]
    pub shared_gpa_boundary_bits: u8,
    #[bits(20)]
    _reserved12: u32,
    _reserved2: u32,
    _reserved3: u32,
}
279
open_enum! {
    /// Hypercall call codes. Codes at 0x8000+ are extended hypercalls; codes
    /// at 0xC000+ are VBS-specific in this crate.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HypercallCode: u16 {
        #![expect(non_upper_case_globals)]

        HvCallSwitchVirtualAddressSpace = 0x0001,
        HvCallFlushVirtualAddressSpace = 0x0002,
        HvCallFlushVirtualAddressList = 0x0003,
        HvCallNotifyLongSpinWait = 0x0008,
        HvCallInvokeHypervisorDebugger = 0x000a,
        HvCallSendSyntheticClusterIpi = 0x000b,
        HvCallModifyVtlProtectionMask = 0x000c,
        HvCallEnablePartitionVtl = 0x000d,
        HvCallEnableVpVtl = 0x000f,
        HvCallVtlCall = 0x0011,
        HvCallVtlReturn = 0x0012,
        HvCallFlushVirtualAddressSpaceEx = 0x0013,
        HvCallFlushVirtualAddressListEx = 0x0014,
        HvCallSendSyntheticClusterIpiEx = 0x0015,
        HvCallInstallIntercept = 0x004d,
        HvCallGetVpRegisters = 0x0050,
        HvCallSetVpRegisters = 0x0051,
        HvCallTranslateVirtualAddress = 0x0052,
        HvCallPostMessage = 0x005C,
        HvCallSignalEvent = 0x005D,
        HvCallOutputDebugCharacter = 0x0071,
        HvCallGetSystemProperty = 0x007b,
        HvCallRetargetDeviceInterrupt = 0x007e,
        HvCallNotifyPartitionEvent = 0x0087,
        HvCallAssertVirtualInterrupt = 0x0094,
        HvCallStartVirtualProcessor = 0x0099,
        HvCallGetVpIndexFromApicId = 0x009A,
        HvCallTranslateVirtualAddressEx = 0x00AC,
        HvCallCheckForIoIntercept = 0x00ad,
        HvCallFlushGuestPhysicalAddressSpace = 0x00AF,
        HvCallFlushGuestPhysicalAddressList = 0x00B0,
        HvCallSignalEventDirect = 0x00C0,
        HvCallPostMessageDirect = 0x00C1,
        HvCallCheckSparseGpaPageVtlAccess = 0x00D4,
        HvCallAcceptGpaPages = 0x00D9,
        HvCallModifySparseGpaPageHostVisibility = 0x00DB,
        HvCallRestorePartitionTime = 0x0103,
        HvCallMemoryMappedIoRead = 0x0106,
        HvCallMemoryMappedIoWrite = 0x0107,
        HvCallPinGpaPageRanges = 0x0112,
        HvCallUnpinGpaPageRanges = 0x0113,
        HvCallQuerySparseGpaPageHostVisibility = 0x011C,

        HvExtCallQueryCapabilities = 0x8001,

        HvCallVbsVmCallReport = 0xC001,
    }
}
335
// Hypervisor synthetic MSRs (x64), in the 0x4000_0000 MSR range.

// Guest OS identity and hypercall page setup.
pub const HV_X64_MSR_GUEST_OS_ID: u32 = 0x40000000;
pub const HV_X64_MSR_HYPERCALL: u32 = 0x40000001;
pub const HV_X64_MSR_VP_INDEX: u32 = 0x40000002;
// Reference time and frequencies.
pub const HV_X64_MSR_TIME_REF_COUNT: u32 = 0x40000020;
pub const HV_X64_MSR_REFERENCE_TSC: u32 = 0x40000021;
pub const HV_X64_MSR_TSC_FREQUENCY: u32 = 0x40000022;
pub const HV_X64_MSR_APIC_FREQUENCY: u32 = 0x40000023;
// APIC access assists.
pub const HV_X64_MSR_EOI: u32 = 0x40000070;
pub const HV_X64_MSR_ICR: u32 = 0x40000071;
pub const HV_X64_MSR_TPR: u32 = 0x40000072;
pub const HV_X64_MSR_VP_ASSIST_PAGE: u32 = 0x40000073;
// Synthetic interrupt controller (SynIC) control MSRs.
pub const HV_X64_MSR_SCONTROL: u32 = 0x40000080;
pub const HV_X64_MSR_SVERSION: u32 = 0x40000081;
pub const HV_X64_MSR_SIEFP: u32 = 0x40000082;
pub const HV_X64_MSR_SIMP: u32 = 0x40000083;
pub const HV_X64_MSR_EOM: u32 = 0x40000084;
// Synthetic interrupt source (SINT) MSRs, one per SINT.
pub const HV_X64_MSR_SINT0: u32 = 0x40000090;
pub const HV_X64_MSR_SINT1: u32 = 0x40000091;
pub const HV_X64_MSR_SINT2: u32 = 0x40000092;
pub const HV_X64_MSR_SINT3: u32 = 0x40000093;
pub const HV_X64_MSR_SINT4: u32 = 0x40000094;
pub const HV_X64_MSR_SINT5: u32 = 0x40000095;
pub const HV_X64_MSR_SINT6: u32 = 0x40000096;
pub const HV_X64_MSR_SINT7: u32 = 0x40000097;
pub const HV_X64_MSR_SINT8: u32 = 0x40000098;
pub const HV_X64_MSR_SINT9: u32 = 0x40000099;
pub const HV_X64_MSR_SINT10: u32 = 0x4000009a;
pub const HV_X64_MSR_SINT11: u32 = 0x4000009b;
pub const HV_X64_MSR_SINT12: u32 = 0x4000009c;
pub const HV_X64_MSR_SINT13: u32 = 0x4000009d;
pub const HV_X64_MSR_SINT14: u32 = 0x4000009e;
pub const HV_X64_MSR_SINT15: u32 = 0x4000009f;
// Synthetic timer configuration/count MSR pairs.
pub const HV_X64_MSR_STIMER0_CONFIG: u32 = 0x400000b0;
pub const HV_X64_MSR_STIMER0_COUNT: u32 = 0x400000b1;
pub const HV_X64_MSR_STIMER1_CONFIG: u32 = 0x400000b2;
pub const HV_X64_MSR_STIMER1_COUNT: u32 = 0x400000b3;
pub const HV_X64_MSR_STIMER2_CONFIG: u32 = 0x400000b4;
pub const HV_X64_MSR_STIMER2_COUNT: u32 = 0x400000b5;
pub const HV_X64_MSR_STIMER3_CONFIG: u32 = 0x400000b6;
pub const HV_X64_MSR_STIMER3_COUNT: u32 = 0x400000b7;
// Guest idle entry.
pub const HV_X64_MSR_GUEST_IDLE: u32 = 0x400000F0;
// Guest crash reporting MSRs.
pub const HV_X64_MSR_GUEST_CRASH_P0: u32 = 0x40000100;
pub const HV_X64_MSR_GUEST_CRASH_P1: u32 = 0x40000101;
pub const HV_X64_MSR_GUEST_CRASH_P2: u32 = 0x40000102;
pub const HV_X64_MSR_GUEST_CRASH_P3: u32 = 0x40000103;
pub const HV_X64_MSR_GUEST_CRASH_P4: u32 = 0x40000104;
pub const HV_X64_MSR_GUEST_CRASH_CTL: u32 = 0x40000105;
385
/// A hypervisor status code: zero is success, nonzero is an [`HvError`].
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, PartialEq, Eq)]
#[repr(transparent)]
pub struct HvStatus(pub u16);
392
393impl HvStatus {
394 pub const SUCCESS: Self = Self(0);
396
397 pub fn result(self) -> HvResult<()> {
400 if let Ok(err) = self.0.try_into() {
401 Err(HvError(err))
402 } else {
403 Ok(())
404 }
405 }
406
407 pub fn is_ok(self) -> bool {
409 self == Self::SUCCESS
410 }
411
412 pub fn is_err(self) -> bool {
414 self != Self::SUCCESS
415 }
416
417 const fn from_bits(bits: u16) -> Self {
418 Self(bits)
419 }
420
421 const fn into_bits(self) -> u16 {
422 self.0
423 }
424}
425
426impl From<Result<(), HvError>> for HvStatus {
427 fn from(err: Result<(), HvError>) -> Self {
428 err.err().map_or(Self::SUCCESS, |err| Self(err.0.get()))
429 }
430}
431
432impl Debug for HvStatus {
433 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
434 match self.result() {
435 Ok(()) => f.write_str("Success"),
436 Err(err) => Debug::fmt(&err, f),
437 }
438 }
439}
440
/// A nonzero hypervisor status code. `NonZeroU16` lets `Option<HvError>` and
/// the `Result` in [`HvStatus::result`] stay the same size as the raw code.
#[derive(Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout)]
#[repr(transparent)]
pub struct HvError(core::num::NonZeroU16);
448
impl From<core::num::NonZeroU16> for HvError {
    /// Wraps a raw nonzero status code.
    fn from(err: core::num::NonZeroU16) -> Self {
        Self(err)
    }
}
454
455impl Debug for HvError {
456 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
457 match self.debug_name() {
458 Some(name) => f.pad(name),
459 None => Debug::fmt(&self.0.get(), f),
460 }
461 }
462}
463
464impl core::fmt::Display for HvError {
465 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
466 match self.doc_str() {
467 Some(s) => f.write_str(s),
468 None => write!(f, "Hypervisor error {:#06x}", self.0),
469 }
470 }
471}
472
// Marker impl so HvError composes with error-handling machinery in no_std.
impl core::error::Error for HvError {}
474
// Generates, for each `#[doc] Name = code` entry: an associated constant on
// `$ty`, plus `debug_name` (symbolic name lookup) and `doc_str` (trimmed doc
// text lookup, used as the Display string). Each entry must carry exactly one
// doc comment, since the matcher requires a `#[doc = ...]` attribute.
macro_rules! hv_error {
    ($ty:ty, $(#[doc = $doc:expr] $ident:ident = $val:expr),* $(,)?) => {

        #[expect(non_upper_case_globals)]
        impl $ty {
            $(
                #[doc = $doc]
                pub const $ident: Self = Self(core::num::NonZeroU16::new($val).unwrap());
            )*

            fn debug_name(&self) -> Option<&'static str> {
                Some(match self.0.get() {
                    $(
                        $val => stringify!($ident),
                    )*
                    _ => return None,
                })
            }

            fn doc_str(&self) -> Option<&'static str> {
                Some(match self.0.get() {
                    $(
                        $val => const { $doc.trim_ascii() },
                    )*
                    _ => return None,
                })
            }
        }
    };
}
505
506hv_error! {
508 HvError,
509 InvalidHypercallCode = 0x0002,
511 InvalidHypercallInput = 0x0003,
513 InvalidAlignment = 0x0004,
515 InvalidParameter = 0x0005,
517 AccessDenied = 0x0006,
519 InvalidPartitionState = 0x0007,
521 OperationDenied = 0x0008,
523 UnknownProperty = 0x0009,
525 PropertyValueOutOfRange = 0x000A,
527 InsufficientMemory = 0x000B,
529 PartitionTooDeep = 0x000C,
531 InvalidPartitionId = 0x000D,
533 InvalidVpIndex = 0x000E,
535 NotFound = 0x0010,
537 InvalidPortId = 0x0011,
539 InvalidConnectionId = 0x0012,
541 InsufficientBuffers = 0x0013,
543 NotAcknowledged = 0x0014,
545 InvalidVpState = 0x0015,
547 Acknowledged = 0x0016,
549 InvalidSaveRestoreState = 0x0017,
551 InvalidSynicState = 0x0018,
553 ObjectInUse = 0x0019,
555 InvalidProximityDomainInfo = 0x001A,
557 NoData = 0x001B,
559 Inactive = 0x001C,
561 NoResources = 0x001D,
563 FeatureUnavailable = 0x001E,
565 PartialPacket = 0x001F,
567 ProcessorFeatureNotSupported = 0x0020,
569 ProcessorCacheLineFlushSizeIncompatible = 0x0030,
571 InsufficientBuffer = 0x0033,
573 IncompatibleProcessor = 0x0037,
575 InsufficientDeviceDomains = 0x0038,
577 CpuidFeatureValidationError = 0x003C,
579 CpuidXsaveFeatureValidationError = 0x003D,
581 ProcessorStartupTimeout = 0x003E,
583 SmxEnabled = 0x003F,
585 InvalidLpIndex = 0x0041,
587 InvalidRegisterValue = 0x0050,
589 InvalidVtlState = 0x0051,
591 NxNotDetected = 0x0055,
593 InvalidDeviceId = 0x0057,
595 InvalidDeviceState = 0x0058,
597 PendingPageRequests = 0x0059,
599 PageRequestInvalid = 0x0060,
601 KeyAlreadyExists = 0x0065,
603 DeviceAlreadyInDomain = 0x0066,
605 InvalidCpuGroupId = 0x006F,
607 InvalidCpuGroupState = 0x0070,
609 OperationFailed = 0x0071,
611 NotAllowedWithNestedVirtActive = 0x0072,
613 InsufficientRootMemory = 0x0073,
615 EventBufferAlreadyFreed = 0x0074,
617 Timeout = 0x0078,
619 VtlAlreadyEnabled = 0x0086,
621 UnknownRegisterName = 0x0087,
623}
624
/// Shorthand for a result carrying a hypervisor error code.
pub type HvResult<T> = Result<T, HvError>;
627
/// A virtual trust level. Higher VTLs are more privileged; the `Ord` impl
/// follows the numeric value.
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Vtl {
    Vtl0 = 0,
    Vtl1 = 1,
    Vtl2 = 2,
}
635
636impl TryFrom<u8> for Vtl {
637 type Error = HvError;
638
639 fn try_from(value: u8) -> Result<Self, Self::Error> {
640 Ok(match value {
641 0 => Self::Vtl0,
642 1 => Self::Vtl1,
643 2 => Self::Vtl2,
644 _ => return Err(HvError::InvalidParameter),
645 })
646 }
647}
648
impl From<Vtl> for u8 {
    /// Returns the numeric VTL value.
    fn from(value: Vtl) -> Self {
        value as u8
    }
}
654
/// Contents of the guest crash control MSR ([`HV_X64_MSR_GUEST_CRASH_CTL`]).
#[bitfield(u64)]
pub struct GuestCrashCtl {
    #[bits(58)]
    _reserved: u64,
    #[bits(3)]
    pub pre_os_id: u8,
    /// Set when no crash dump will be captured.
    #[bits(1)]
    pub no_crash_dump: bool,
    /// Set when the crash parameter MSRs contain a message address/length.
    #[bits(1)]
    pub crash_message: bool,
    /// Written to trigger the crash notification.
    #[bits(1)]
    pub crash_notify: bool,
}
674
/// A 16-byte value with 16-byte alignment, stored as native-endian bytes.
/// Used where a `u128`-sized field needs a zerocopy-compatible layout.
#[repr(C, align(16))]
#[derive(Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AlignedU128([u8; 16]);
678
impl AlignedU128 {
    /// Returns the value as native-endian bytes.
    pub fn as_ne_bytes(&self) -> [u8; 16] {
        self.0
    }

    /// Constructs the value from native-endian bytes.
    pub fn from_ne_bytes(val: [u8; 16]) -> Self {
        Self(val)
    }
}
688
impl Debug for AlignedU128 {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Format as the numeric u128 value rather than a byte array.
        Debug::fmt(&u128::from_ne_bytes(self.0), f)
    }
}
694
// Widening conversions from the unsigned integer types, and back to u128.

impl From<u128> for AlignedU128 {
    fn from(v: u128) -> Self {
        Self(v.to_ne_bytes())
    }
}

impl From<u64> for AlignedU128 {
    fn from(v: u64) -> Self {
        (v as u128).into()
    }
}

impl From<u32> for AlignedU128 {
    fn from(v: u32) -> Self {
        (v as u128).into()
    }
}

impl From<u16> for AlignedU128 {
    fn from(v: u16) -> Self {
        (v as u128).into()
    }
}

impl From<u8> for AlignedU128 {
    fn from(v: u8) -> Self {
        (v as u128).into()
    }
}

impl From<AlignedU128> for u128 {
    fn from(v: AlignedU128) -> Self {
        u128::from_ne_bytes(v.0)
    }
}
730
open_enum! {
    /// Synthetic message types. Types with the high bit set (0x8000_0000+)
    /// are hypervisor-originated; the 0x8001_xxxx range is architecture-
    /// specific (mostly x64 here).
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvMessageType: u32 {
        #![expect(non_upper_case_globals)]

        HvMessageTypeNone = 0x00000000,

        HvMessageTypeUnmappedGpa = 0x80000000,
        HvMessageTypeGpaIntercept = 0x80000001,
        HvMessageTypeUnacceptedGpa = 0x80000003,
        HvMessageTypeGpaAttributeIntercept = 0x80000004,
        HvMessageTypeEnablePartitionVtlIntercept = 0x80000005,
        HvMessageTypeTimerExpired = 0x80000010,
        HvMessageTypeInvalidVpRegisterValue = 0x80000020,
        HvMessageTypeUnrecoverableException = 0x80000021,
        HvMessageTypeUnsupportedFeature = 0x80000022,
        HvMessageTypeTlbPageSizeMismatch = 0x80000023,
        HvMessageTypeIommuFault = 0x80000024,
        HvMessageTypeEventLogBufferComplete = 0x80000040,
        HvMessageTypeHypercallIntercept = 0x80000050,
        HvMessageTypeSynicEventIntercept = 0x80000060,
        HvMessageTypeSynicSintIntercept = 0x80000061,
        HvMessageTypeSynicSintDeliverable = 0x80000062,
        HvMessageTypeAsyncCallCompletion = 0x80000070,
        HvMessageTypeX64IoPortIntercept = 0x80010000,
        HvMessageTypeMsrIntercept = 0x80010001,
        HvMessageTypeX64CpuidIntercept = 0x80010002,
        HvMessageTypeExceptionIntercept = 0x80010003,
        HvMessageTypeX64ApicEoi = 0x80010004,
        HvMessageTypeX64IommuPrq = 0x80010005,
        HvMessageTypeRegisterIntercept = 0x80010006,
        HvMessageTypeX64Halt = 0x80010007,
        HvMessageTypeX64InterruptionDeliverable = 0x80010008,
        HvMessageTypeX64SipiIntercept = 0x80010009,
        HvMessageTypeX64RdtscIntercept = 0x8001000a,
        HvMessageTypeX64ApicSmiIntercept = 0x8001000b,
        HvMessageTypeArm64ResetIntercept = 0x8001000c,
        HvMessageTypeX64ApicInitSipiIntercept = 0x8001000d,
        HvMessageTypeX64ApicWriteIntercept = 0x8001000e,
        HvMessageTypeX64ProxyInterruptIntercept = 0x8001000f,
        HvMessageTypeX64IsolationCtrlRegIntercept = 0x80010010,
        HvMessageTypeX64SnpGuestRequestIntercept = 0x80010011,
        HvMessageTypeX64ExceptionTrapIntercept = 0x80010012,
        HvMessageTypeX64SevVmgexitIntercept = 0x80010013,
    }
}
777
impl Default for HvMessageType {
    /// Defaults to the "no message" type.
    fn default() -> Self {
        HvMessageType::HvMessageTypeNone
    }
}
783
/// The SINT used for hypervisor intercept messages.
pub const HV_SYNIC_INTERCEPTION_SINT_INDEX: u8 = 0;

/// Number of synthetic interrupt sources per virtual processor.
pub const NUM_SINTS: usize = 16;
/// Number of synthetic timers per virtual processor.
pub const NUM_TIMERS: usize = 4;
788
/// Header preceding the payload of a synthetic message.
#[repr(C)]
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessageHeader {
    pub typ: HvMessageType,
    /// Length of the valid payload, in bytes.
    pub len: u8,
    pub flags: HvMessageFlags,
    pub rsvd: u16,
    pub id: u64,
}
798
/// Flags in a synthetic message header.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessageFlags {
    /// Set when another message is queued behind this one.
    pub message_pending: bool,
    #[bits(7)]
    _reserved: u8,
}
806
/// Total size of a synthetic message; fixed at 256 bytes by the ABI.
pub const HV_MESSAGE_SIZE: usize = size_of::<HvMessage>();
const_assert!(HV_MESSAGE_SIZE == 256);
/// Maximum payload bytes in a synthetic message (256 minus the header).
pub const HV_MESSAGE_PAYLOAD_SIZE: usize = 240;
810
/// A synthetic message: header plus a fixed-size payload buffer. Only the
/// first `header.len` payload bytes are meaningful.
#[repr(C, align(16))]
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessage {
    pub header: HvMessageHeader,
    pub payload_buffer: [u8; HV_MESSAGE_PAYLOAD_SIZE],
}
817
818impl Default for HvMessage {
819 fn default() -> Self {
820 Self {
821 header: FromZeros::new_zeroed(),
822 payload_buffer: [0; 240],
823 }
824 }
825}
826
827impl HvMessage {
828 pub fn new(typ: HvMessageType, id: u64, payload: &[u8]) -> Self {
831 let mut msg = HvMessage {
832 header: HvMessageHeader {
833 typ,
834 len: payload.len() as u8,
835 flags: HvMessageFlags::new(),
836 rsvd: 0,
837 id,
838 },
839 payload_buffer: [0; 240],
840 };
841 msg.payload_buffer[..payload.len()].copy_from_slice(payload);
842 msg
843 }
844
845 pub fn payload(&self) -> &[u8] {
846 &self.payload_buffer[..self.header.len as usize]
847 }
848
849 pub fn as_message<T: MessagePayload>(&self) -> &T {
850 let () = T::CHECK;
852 T::ref_from_prefix(&self.payload_buffer).unwrap().0
853 }
854
855 pub fn as_message_mut<T: MessagePayload>(&mut self) -> &T {
856 let () = T::CHECK;
858 T::mut_from_prefix(&mut self.payload_buffer).unwrap().0
859 }
860}
861
/// Marker trait for types that can be used as a synthetic message payload.
pub trait MessagePayload: KnownLayout + Immutable + IntoBytes + FromBytes + Sized {
    /// Compile-time proof that the payload fits within the message buffer
    /// and does not require stricter alignment than `HvMessage` provides.
    /// Forced to evaluate via `let () = T::CHECK;` at each use site.
    #[doc(hidden)]
    const CHECK: () = {
        assert!(size_of::<Self>() <= HV_MESSAGE_PAYLOAD_SIZE);
        assert!(align_of::<Self>() <= align_of::<HvMessage>());
    };
}
871
/// Payload for an `HvMessageTypeTimerExpired` message.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TimerMessagePayload {
    pub timer_index: u32,
    pub reserved: u32,
    pub expiration_time: u64,
    pub delivery_time: u64,
}
880
881pub mod hypercall {
882 use super::*;
883 use core::ops::RangeInclusive;
884 use zerocopy::Unalign;
885
    /// The hypercall input control register value.
    #[bitfield(u64)]
    pub struct Control {
        /// The hypercall call code; values match [`HypercallCode`].
        pub code: u16,
        /// Set for the fast (register-based) calling convention.
        pub fast: bool,
        #[bits(10)]
        pub variable_header_size: usize,
        #[bits(4)]
        _rsvd0: u8,
        pub nested: bool,
        /// Number of elements for rep (repeated) hypercalls.
        #[bits(12)]
        pub rep_count: usize,
        #[bits(4)]
        _rsvd1: u8,
        /// Starting element index when resuming a rep hypercall.
        #[bits(12)]
        pub rep_start: usize,
        #[bits(4)]
        _rsvd2: u8,
    }
911
    /// The hypercall output register value.
    #[bitfield(u64)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    #[must_use]
    pub struct HypercallOutput {
        #[bits(16)]
        pub call_status: HvStatus,
        pub rsvd: u16,
        /// Number of rep elements successfully processed.
        #[bits(12)]
        pub elements_processed: usize,
        #[bits(20)]
        pub rsvd2: u32,
    }
925
    impl From<HvError> for HypercallOutput {
        /// An output with the given failure status and no elements processed.
        fn from(e: HvError) -> Self {
            Self::new().with_call_status(Err(e).into())
        }
    }
931
    impl HypercallOutput {
        /// A success output with zero elements processed.
        pub const SUCCESS: Self = Self::new();

        /// Converts the call status into a `Result`.
        pub fn result(&self) -> Result<(), HvError> {
            self.call_status().result()
        }
    }
940
    /// A register name/value pair, as used by the get/set VP register
    /// hypercalls.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvRegisterAssoc {
        pub name: HvRegisterName,
        pub pad: [u32; 3],
        pub value: HvRegisterValue,
    }
948
    // Convenience conversion from a (name, value) pair.
    impl<N: Into<HvRegisterName>, T: Into<HvRegisterValue>> From<(N, T)> for HvRegisterAssoc {
        fn from((name, value): (N, T)) -> Self {
            Self {
                name: name.into(),
                pad: [0; 3],
                value: value.into(),
            }
        }
    }
958
    // Same conversion from a borrowed pair of Copy types.
    impl<N: Copy + Into<HvRegisterName>, T: Copy + Into<HvRegisterValue>> From<&(N, T)>
        for HvRegisterAssoc
    {
        fn from(&(name, value): &(N, T)) -> Self {
            Self {
                name: name.into(),
                pad: [0; 3],
                value: value.into(),
            }
        }
    }
970
    /// Contents of the hypercall MSR (`HV_X64_MSR_HYPERCALL`).
    #[bitfield(u64)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct MsrHypercallContents {
        pub enable: bool,
        pub locked: bool,
        #[bits(10)]
        pub reserved_p: u64,
        /// Guest page number of the hypercall page.
        #[bits(52)]
        pub gpn: u64,
    }
981
982 #[repr(C, align(8))]
983 #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
984 pub struct PostMessage {
985 pub connection_id: u32,
986 pub padding: u32,
987 pub message_type: u32,
988 pub payload_size: u32,
989 pub payload: [u8; 240],
990 }
991
    /// Input for the `HvCallSignalEvent` hypercall.
    #[repr(C, align(8))]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct SignalEvent {
        pub connection_id: u32,
        pub flag_number: u16,
        pub rsvd: u16,
    }
999
    /// Input for the `HvCallPostMessageDirect` hypercall.
    #[repr(C)]
    #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct PostMessageDirect {
        pub partition_id: u64,
        pub vp_index: u32,
        pub vtl: u8,
        pub padding0: [u8; 3],
        pub sint: u8,
        pub padding1: [u8; 3],
        // Unalign: the surrounding layout does not guarantee HvMessage's
        // 16-byte alignment at this offset.
        pub message: Unalign<HvMessage>,
        pub padding2: u32,
    }
1012
    /// Input for the `HvCallSignalEventDirect` hypercall.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct SignalEventDirect {
        pub target_partition: u64,
        pub target_vp: u32,
        pub target_vtl: u8,
        pub target_sint: u8,
        pub flag_number: u16,
    }
1022
    /// Output for the `HvCallSignalEventDirect` hypercall.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct SignalEventDirectOutput {
        pub newly_signaled: u8,
        pub rsvd: [u8; 7],
    }
1029
    /// A device interrupt descriptor: source kind plus source-specific data.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct InterruptEntry {
        pub source: HvInterruptSource,
        pub rsvd: u32,
        /// Interpretation depends on `source` (MSI vs. I/O APIC).
        pub data: [u32; 2],
    }
1037
    open_enum! {
        /// The source kind for an [`InterruptEntry`].
        #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
        pub enum HvInterruptSource: u32 {
            MSI = 1,
            IO_APIC = 2,
        }
    }
1045
    /// The destination of a device interrupt.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct InterruptTarget {
        pub vector: u32,
        pub flags: HvInterruptTargetFlags,
        /// Processor mask, or the set format when `processor_set` is set.
        pub mask_or_format: u64,
    }
1053
    /// Flags describing how an [`InterruptTarget`] is encoded/delivered.
    #[bitfield(u32)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvInterruptTargetFlags {
        pub multicast: bool,
        pub processor_set: bool,
        pub proxy_redirect: bool,
        #[bits(29)]
        pub reserved: u32,
    }
1063
    // Raw flag values matching HvInterruptTargetFlags bit positions.
    pub const HV_DEVICE_INTERRUPT_TARGET_MULTICAST: u32 = 1;
    pub const HV_DEVICE_INTERRUPT_TARGET_PROCESSOR_SET: u32 = 2;
    pub const HV_DEVICE_INTERRUPT_TARGET_PROXY_REDIRECT: u32 = 4;

    // Generic set formats for processor-set encodings.
    pub const HV_GENERIC_SET_SPARSE_4K: u64 = 0;
    pub const HV_GENERIC_SET_ALL: u64 = 1;
1070
    /// Input for the `HvCallRetargetDeviceInterrupt` hypercall.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct RetargetDeviceInterrupt {
        pub partition_id: u64,
        pub device_id: u64,
        pub entry: InterruptEntry,
        pub rsvd: u64,
        pub target_header: InterruptTarget,
    }
1080
    /// A VTL specifier in hypercall input: either an explicit target VTL or
    /// "use the current VTL" when `use_target_vtl` is clear.
    #[bitfield(u8)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvInputVtl {
        #[bits(4)]
        pub target_vtl_value: u8,
        pub use_target_vtl: bool,
        #[bits(3)]
        pub reserved: u8,
    }
1090
    impl From<Vtl> for HvInputVtl {
        /// An explicit target VTL.
        fn from(value: Vtl) -> Self {
            Self::from(Some(value))
        }
    }

    impl From<Option<Vtl>> for HvInputVtl {
        /// `Some(vtl)` targets that VTL explicitly; `None` means "current VTL".
        fn from(value: Option<Vtl>) -> Self {
            Self::new()
                .with_use_target_vtl(value.is_some())
                .with_target_vtl_value(value.map_or(0, Into::into))
        }
    }
1104
1105 impl HvInputVtl {
1106 pub fn target_vtl(&self) -> Result<Option<Vtl>, HvError> {
1108 if self.reserved() != 0 {
1109 return Err(HvError::InvalidParameter);
1110 }
1111 if self.use_target_vtl() {
1112 Ok(Some(self.target_vtl_value().try_into()?))
1113 } else {
1114 Ok(None)
1115 }
1116 }
1117
1118 pub const CURRENT_VTL: Self = Self::new();
1119 }
1120
    /// Fixed input header for `HvCallGetVpRegisters`/`HvCallSetVpRegisters`.
    #[repr(C)]
    #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct GetSetVpRegisters {
        pub partition_id: u64,
        pub vp_index: u32,
        pub target_vtl: HvInputVtl,
        pub rsvd: [u8; 3],
    }
1129
    open_enum::open_enum! {
        /// OS identifiers for the Microsoft-format guest OS ID.
        #[derive(Default)]
        pub enum HvGuestOsMicrosoftIds: u8 {
            UNDEFINED = 0x00,
            MSDOS = 0x01,
            WINDOWS_3X = 0x02,
            WINDOWS_9X = 0x03,
            WINDOWS_NT = 0x04,
            WINDOWS_CE = 0x05,
        }
    }
1141
    /// Microsoft-format guest OS ID (open-source bit clear).
    #[bitfield(u64)]
    pub struct HvGuestOsMicrosoft {
        #[bits(40)]
        _rsvd: u64,
        /// Values match [`HvGuestOsMicrosoftIds`].
        #[bits(8)]
        pub os_id: u8,
        #[bits(16)]
        pub vendor_id: u16,
    }
1152
    open_enum::open_enum! {
        /// OS identifiers for the open-source-format guest OS ID.
        #[derive(Default)]
        pub enum HvGuestOsOpenSourceType: u8 {
            UNDEFINED = 0x00,
            LINUX = 0x01,
            FREEBSD = 0x02,
            XEN = 0x03,
            ILLUMOS = 0x04,
        }
    }
1163
    /// Open-source-format guest OS ID (open-source bit set).
    #[bitfield(u64)]
    pub struct HvGuestOsOpenSource {
        #[bits(16)]
        pub build_no: u16,
        #[bits(32)]
        pub version: u32,
        /// Values match [`HvGuestOsOpenSourceType`].
        #[bits(8)]
        pub os_id: u8,
        #[bits(7)]
        pub os_type: u8,
        #[bits(1)]
        pub is_open_source: bool,
    }
1177
    /// The guest OS ID MSR value; the top bit selects between the
    /// Microsoft and open-source formats.
    #[bitfield(u64)]
    pub struct HvGuestOsId {
        #[bits(63)]
        _rsvd: u64,
        is_open_source: bool,
    }
1184
1185 impl HvGuestOsId {
1186 pub fn microsoft(&self) -> Option<HvGuestOsMicrosoft> {
1187 (!self.is_open_source()).then(|| HvGuestOsMicrosoft::from(u64::from(*self)))
1188 }
1189
1190 pub fn open_source(&self) -> Option<HvGuestOsOpenSource> {
1191 (self.is_open_source()).then(|| HvGuestOsOpenSource::from(u64::from(*self)))
1192 }
1193
1194 pub fn as_u64(&self) -> u64 {
1195 self.0
1196 }
1197 }
1198
    // Access masks for HvCallInstallIntercept.
    pub const HV_INTERCEPT_ACCESS_MASK_NONE: u32 = 0x00;
    pub const HV_INTERCEPT_ACCESS_MASK_READ: u32 = 0x01;
    pub const HV_INTERCEPT_ACCESS_MASK_WRITE: u32 = 0x02;
    pub const HV_INTERCEPT_ACCESS_MASK_READ_WRITE: u32 =
        HV_INTERCEPT_ACCESS_MASK_READ | HV_INTERCEPT_ACCESS_MASK_WRITE;
    pub const HV_INTERCEPT_ACCESS_MASK_EXECUTE: u32 = 0x04;
1205
1206 open_enum::open_enum! {
1207 #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
1208 pub enum HvInterceptType: u32 {
1209 #![expect(non_upper_case_globals)]
1210 HvInterceptTypeX64IoPort = 0x00000000,
1211 HvInterceptTypeX64Msr = 0x00000001,
1212 HvInterceptTypeX64Cpuid = 0x00000002,
1213 HvInterceptTypeException = 0x00000003,
1214 HvInterceptTypeHypercall = 0x00000008,
1215 HvInterceptTypeUnknownSynicConnection = 0x0000000D,
1216 HvInterceptTypeX64ApicEoi = 0x0000000E,
1217 HvInterceptTypeRetargetInterruptWithUnknownDeviceId = 0x0000000F,
1218 HvInterceptTypeX64IoPortRange = 0x00000011,
1219 }
1220 }
1221
1222 #[repr(transparent)]
1223 #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
1224 pub struct HvInterceptParameters(u64);
1225
1226 impl HvInterceptParameters {
1227 pub fn new_io_port(port: u16) -> Self {
1228 Self(port as u64)
1229 }
1230
1231 pub fn new_io_port_range(ports: RangeInclusive<u16>) -> Self {
1232 let base = *ports.start() as u64;
1233 let end = *ports.end() as u64;
1234 Self(base | (end << 16))
1235 }
1236
1237 pub fn new_exception(vector: u16) -> Self {
1238 Self(vector as u64)
1239 }
1240
1241 pub fn io_port(&self) -> u16 {
1242 self.0 as u16
1243 }
1244
1245 pub fn io_port_range(&self) -> RangeInclusive<u16> {
1246 let base = self.0 as u16;
1247 let end = (self.0 >> 16) as u16;
1248 base..=end
1249 }
1250
1251 pub fn cpuid_index(&self) -> u32 {
1252 self.0 as u32
1253 }
1254
1255 pub fn exception(&self) -> u16 {
1256 self.0 as u16
1257 }
1258 }
1259
/// Input for the `HvCallInstallIntercept` hypercall.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
pub struct InstallIntercept {
    /// Target partition ID.
    pub partition_id: u64,
    /// Combination of `HV_INTERCEPT_ACCESS_MASK_*` bits.
    pub access_type_mask: u32,
    /// What kind of event to intercept.
    pub intercept_type: HvInterceptType,
    /// Type-specific parameters (port, vector, …).
    pub intercept_parameters: HvInterceptParameters,
}

/// Input for the `HvCallAssertVirtualInterrupt` hypercall.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
pub struct AssertVirtualInterrupt {
    /// Target partition ID.
    pub partition_id: u64,
    /// Delivery mode / trigger / destination-mode control word.
    pub interrupt_control: HvInterruptControl,
    /// APIC destination (interpretation depends on `interrupt_control`).
    pub destination_address: u64,
    /// Interrupt vector to assert.
    pub requested_vector: u32,
    /// Target VTL.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub rsvd0: u8,
    /// Reserved; must be zero.
    pub rsvd1: u16,
}

/// Input for the `HvCallStartVirtualProcessor` hypercall on x64.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct StartVirtualProcessorX64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Index of the VP to start.
    pub vp_index: u32,
    /// VTL to start the VP in.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub rsvd0: u8,
    /// Reserved; must be zero.
    pub rsvd1: u16,
    /// Initial architectural state for the VP.
    pub vp_context: InitialVpContextX64,
}

/// Initial x64 register state applied when starting a VP or enabling a VTL.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct InitialVpContextX64 {
    pub rip: u64,
    pub rsp: u64,
    pub rflags: u64,
    pub cs: HvX64SegmentRegister,
    pub ds: HvX64SegmentRegister,
    pub es: HvX64SegmentRegister,
    pub fs: HvX64SegmentRegister,
    pub gs: HvX64SegmentRegister,
    pub ss: HvX64SegmentRegister,
    pub tr: HvX64SegmentRegister,
    pub ldtr: HvX64SegmentRegister,
    pub idtr: HvX64TableRegister,
    pub gdtr: HvX64TableRegister,
    pub efer: u64,
    pub cr0: u64,
    pub cr3: u64,
    pub cr4: u64,
    /// Value for the IA32_PAT MSR.
    pub msr_cr_pat: u64,
}

/// Input for the `HvCallStartVirtualProcessor` hypercall on ARM64.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct StartVirtualProcessorArm64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Index of the VP to start.
    pub vp_index: u32,
    /// VTL to start the VP in.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub rsvd0: u8,
    /// Reserved; must be zero.
    pub rsvd1: u16,
    /// Initial architectural state for the VP.
    pub vp_context: InitialVpContextArm64,
}

/// Initial ARM64 register state applied when starting a VP or enabling a VTL.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct InitialVpContextArm64 {
    pub pc: u64,
    pub sp_elh: u64,
    pub sctlr_el1: u64,
    pub mair_el1: u64,
    pub tcr_el1: u64,
    pub vbar_el1: u64,
    pub ttbr0_el1: u64,
    pub ttbr1_el1: u64,
    pub x18: u64,
}
1339
1340 impl InitialVpContextX64 {
1341 pub fn as_hv_register_assocs(&self) -> impl Iterator<Item = HvRegisterAssoc> + '_ {
1342 let regs = [
1343 (HvX64RegisterName::Rip, HvRegisterValue::from(self.rip)).into(),
1344 (HvX64RegisterName::Rsp, HvRegisterValue::from(self.rsp)).into(),
1345 (
1346 HvX64RegisterName::Rflags,
1347 HvRegisterValue::from(self.rflags),
1348 )
1349 .into(),
1350 (HvX64RegisterName::Cs, HvRegisterValue::from(self.cs)).into(),
1351 (HvX64RegisterName::Ds, HvRegisterValue::from(self.ds)).into(),
1352 (HvX64RegisterName::Es, HvRegisterValue::from(self.es)).into(),
1353 (HvX64RegisterName::Fs, HvRegisterValue::from(self.fs)).into(),
1354 (HvX64RegisterName::Gs, HvRegisterValue::from(self.gs)).into(),
1355 (HvX64RegisterName::Ss, HvRegisterValue::from(self.ss)).into(),
1356 (HvX64RegisterName::Tr, HvRegisterValue::from(self.tr)).into(),
1357 (HvX64RegisterName::Ldtr, HvRegisterValue::from(self.ldtr)).into(),
1358 (HvX64RegisterName::Idtr, HvRegisterValue::from(self.idtr)).into(),
1359 (HvX64RegisterName::Gdtr, HvRegisterValue::from(self.gdtr)).into(),
1360 (HvX64RegisterName::Efer, HvRegisterValue::from(self.efer)).into(),
1361 (HvX64RegisterName::Cr0, HvRegisterValue::from(self.cr0)).into(),
1362 (HvX64RegisterName::Cr3, HvRegisterValue::from(self.cr3)).into(),
1363 (HvX64RegisterName::Cr4, HvRegisterValue::from(self.cr4)).into(),
1364 (
1365 HvX64RegisterName::Pat,
1366 HvRegisterValue::from(self.msr_cr_pat),
1367 )
1368 .into(),
1369 ];
1370 regs.into_iter()
1371 }
1372 }
1373
/// Control flags for the x64 translate-virtual-address hypercall.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaControlFlagsX64 {
    /// Validate that the page is readable.
    pub validate_read: bool,
    /// Validate that the page is writable.
    pub validate_write: bool,
    /// Validate that the page is executable.
    pub validate_execute: bool,
    /// Skip privilege checks during the walk.
    pub privilege_exempt: bool,
    /// Set accessed/dirty page-table bits as part of the walk.
    pub set_page_table_bits: bool,
    /// Inhibit a TLB flush during the translation.
    pub tlb_flush_inhibit: bool,
    /// Perform the access checks as a supervisor access.
    pub supervisor_access: bool,
    /// Perform the access checks as a user access.
    pub user_access: bool,
    /// Enforce SMAP restrictions for the access.
    pub enforce_smap: bool,
    /// Override SMAP restrictions for the access.
    pub override_smap: bool,
    /// Treat the access as a shadow-stack access.
    pub shadow_stack: bool,
    #[bits(45)]
    _unused: u64,
    /// Raw input VTL byte; use the `input_vtl` accessors below.
    input_vtl_value: u8,
}
1408
1409 impl TranslateGvaControlFlagsX64 {
1410 pub fn input_vtl(&self) -> HvInputVtl {
1411 self.input_vtl_value().into()
1412 }
1413
1414 pub fn with_input_vtl(self, input_vtl: HvInputVtl) -> Self {
1415 self.with_input_vtl_value(input_vtl.into())
1416 }
1417
1418 pub fn set_input_vtl(&mut self, input_vtl: HvInputVtl) {
1419 self.set_input_vtl_value(input_vtl.into())
1420 }
1421 }
1422
/// Control flags for the ARM64 translate-virtual-address hypercall.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaControlFlagsArm64 {
    /// Validate that the page is readable.
    pub validate_read: bool,
    /// Validate that the page is writable.
    pub validate_write: bool,
    /// Validate that the page is executable.
    pub validate_execute: bool,
    /// Reserved; must be zero.
    _reserved0: bool,
    /// Set accessed/dirty page-table bits as part of the walk.
    pub set_page_table_bits: bool,
    /// Inhibit a TLB flush during the translation.
    pub tlb_flush_inhibit: bool,
    /// Perform the access checks as a supervisor (EL1) access.
    pub supervisor_access: bool,
    /// Perform the access checks as a user (EL0) access.
    pub user_access: bool,
    /// Perform the walk as if PSTATE.PAN were set.
    pub pan_set: bool,
    /// Perform the walk as if PSTATE.PAN were clear.
    pub pan_clear: bool,
    #[bits(46)]
    _unused: u64,
    /// Raw input VTL byte; use the `input_vtl` accessors below.
    #[bits(8)]
    input_vtl_value: u8,
}
1453
1454 impl TranslateGvaControlFlagsArm64 {
1455 pub fn input_vtl(&self) -> HvInputVtl {
1456 self.input_vtl_value().into()
1457 }
1458
1459 pub fn with_input_vtl(self, input_vtl: HvInputVtl) -> Self {
1460 self.with_input_vtl_value(input_vtl.into())
1461 }
1462
1463 pub fn set_input_vtl(&mut self, input_vtl: HvInputVtl) {
1464 self.set_input_vtl_value(input_vtl.into())
1465 }
1466 }
1467
/// Input for the x64 translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressX64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Target virtual processor index.
    pub vp_index: u32,
    /// Reserved; must be zero.
    pub reserved: u32,
    /// Translation controls.
    pub control_flags: TranslateGvaControlFlagsX64,
    /// Guest virtual page number to translate.
    pub gva_page: u64,
}

/// Input for the ARM64 translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressArm64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Target virtual processor index.
    pub vp_index: u32,
    /// Reserved; must be zero.
    pub reserved: u32,
    /// Translation controls.
    pub control_flags: TranslateGvaControlFlagsArm64,
    /// Guest virtual page number to translate.
    pub gva_page: u64,
}

open_enum::open_enum! {
    /// Result of a translate-virtual-address operation.
    pub enum TranslateGvaResultCode: u32 {
        SUCCESS = 0,

        // Translation failures.
        PAGE_NOT_PRESENT = 1,
        PRIVILEGE_VIOLATION = 2,
        INVALID_PAGE_TABLE_FLAGS = 3,

        // GPA access failures.
        GPA_UNMAPPED = 4,
        GPA_NO_READ_ACCESS = 5,
        GPA_NO_WRITE_ACCESS = 6,
        GPA_ILLEGAL_OVERLAY_ACCESS = 7,

        /// The translation could not complete and generated an intercept.
        INTERCEPT = 8,

        GPA_UNACCEPTED = 9,
    }
}

/// Packed result word for a translate-virtual-address operation.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResult {
    /// A [`TranslateGvaResultCode`] value.
    pub result_code: u32,
    /// Memory cache type of the translated page.
    pub cache_type: u8,
    /// Set if the GPA maps an overlay page.
    pub overlay_page: bool,
    #[bits(23)]
    pub reserved: u32,
}

/// Output of the basic translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressOutput {
    pub translation_result: TranslateGvaResult,
    /// Guest physical page number the GVA maps to.
    pub gpa_page: u64,
}

/// Extended x64 translation result, including any pending event generated by
/// the translation.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResultExX64 {
    pub result: TranslateGvaResult,
    /// Reserved; must be zero.
    pub reserved: u64,
    pub event_info: HvX64PendingEvent,
}

const_assert!(size_of::<TranslateGvaResultExX64>() == 0x30);

/// Extended ARM64 translation result.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResultExArm64 {
    pub result: TranslateGvaResult,
}

const_assert!(size_of::<TranslateGvaResultExArm64>() == 0x8);

/// Output of the extended x64 translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressExOutputX64 {
    pub translation_result: TranslateGvaResultExX64,
    /// Guest physical page number the GVA maps to.
    pub gpa_page: u64,
    /// Reserved; must be zero.
    pub reserved: u64,
}

const_assert!(size_of::<TranslateVirtualAddressExOutputX64>() == 0x40);

/// Output of the extended ARM64 translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressExOutputArm64 {
    pub translation_result: TranslateGvaResultExArm64,
    /// Guest physical page number the GVA maps to.
    pub gpa_page: u64,
}

const_assert!(size_of::<TranslateVirtualAddressExOutputArm64>() == 0x10);

/// Input header for the get-VP-index-from-APIC-ID hypercall; the APIC ID list
/// follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct GetVpIndexFromApicId {
    /// Target partition ID.
    pub partition_id: u64,
    /// VTL to query.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub reserved: [u8; 7],
}
1576
/// Input for the x64 `HvCallEnableVpVtl` hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnableVpVtlX64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Index of the VP to enable the VTL on.
    pub vp_index: u32,
    /// VTL being enabled.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub reserved: [u8; 3],
    /// Initial register state for the VTL.
    pub vp_vtl_context: InitialVpContextX64,
}

/// Input for the ARM64 `HvCallEnableVpVtl` hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnableVpVtlArm64 {
    /// Target partition ID.
    pub partition_id: u64,
    /// Index of the VP to enable the VTL on.
    pub vp_index: u32,
    /// VTL being enabled.
    pub target_vtl: u8,
    /// Reserved; must be zero.
    pub reserved: [u8; 3],
    /// Initial register state for the VTL.
    pub vp_vtl_context: InitialVpContextArm64,
}

/// Input header for the modify-VTL-protection-mask hypercall; the GPA page
/// list follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifyVtlProtectionMask {
    /// Target partition ID.
    pub partition_id: u64,
    /// Access permissions to apply to the listed pages.
    pub map_flags: HvMapGpaFlags,
    /// VTL whose protections are modified.
    pub target_vtl: HvInputVtl,
    /// Reserved; must be zero.
    pub reserved: [u8; 3],
}

/// Input header for the check-sparse-GPA-page-VTL-access hypercall; the GPA
/// page list follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct CheckSparseGpaPageVtlAccess {
    /// Target partition ID.
    pub partition_id: u64,
    /// VTL whose access is being checked.
    pub target_vtl: HvInputVtl,
    /// Requested access bits to validate.
    pub desired_access: u8,
    /// Reserved; must be zero.
    pub reserved0: u16,
    /// Reserved; must be zero.
    pub reserved1: u32,
}
const_assert!(size_of::<CheckSparseGpaPageVtlAccess>() == 0x10);
1616
/// Per-page output of the check-sparse-GPA-page-VTL-access hypercall.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct CheckSparseGpaPageVtlAccessOutput {
    /// A [`CheckGpaPageVtlAccessResultCode`] value.
    pub result_code: u8,
    /// Access bits that were denied (meaningful on MEMORY_INTERCEPT).
    pub denied_access: u8,
    /// VTL that would take the intercept.
    #[bits(4)]
    pub intercepting_vtl: u32,
    #[bits(12)]
    _reserved0: u32,
    _reserved1: u32,
}
const_assert!(size_of::<CheckSparseGpaPageVtlAccessOutput>() == 0x8);

open_enum::open_enum! {
    /// Result codes for [`CheckSparseGpaPageVtlAccessOutput::result_code`].
    pub enum CheckGpaPageVtlAccessResultCode: u32 {
        SUCCESS = 0,
        /// The access would generate a memory intercept.
        MEMORY_INTERCEPT = 1,
    }
}

/// Number of per-VTL permission entries carried in a [`VtlPermissionSet`].
pub const HV_VTL_PERMISSION_SET_SIZE: usize = 2;

/// Compressed set of page permissions as seen from VTLs above VTL 0.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VtlPermissionSet {
    /// Permission entry for each VTL starting at VTL 1.
    pub vtl_permission_from_1: [u16; HV_VTL_PERMISSION_SET_SIZE],
}

open_enum::open_enum! {
    /// Required backing memory type for an accept-GPA-pages operation.
    pub enum AcceptMemoryType: u32 {
        ANY = 0,
        RAM = 1,
    }
}

open_enum! {
    /// Host visibility of an isolated guest's page.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HostVisibilityType: u8 {
        PRIVATE = 0,
        SHARED = 3,
    }
}
1667
1668 impl HostVisibilityType {
1670 const fn from_bits(value: u8) -> Self {
1671 Self(value)
1672 }
1673
1674 const fn into_bits(value: Self) -> u8 {
1675 value.0
1676 }
1677 }
1678
/// Attributes for an accept-GPA-pages operation.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AcceptPagesAttributes {
    /// An [`AcceptMemoryType`] value.
    #[bits(6)]
    pub memory_type: u32,
    /// Host visibility to apply to the accepted pages.
    #[bits(2)]
    pub host_visibility: HostVisibilityType,
    /// Set of VTLs for which `vtl_permission_set` applies.
    #[bits(3)]
    pub vtl_set: u32,
    #[bits(21)]
    _reserved: u32,
}

/// Input for the accept-GPA-pages hypercall; pages are accepted starting at
/// `gpa_page_base` for the rep count.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AcceptGpaPages {
    /// Target partition ID.
    pub partition_id: u64,
    /// Attributes of the accept operation.
    pub page_attributes: AcceptPagesAttributes,
    /// Per-VTL permissions to apply for the VTLs in `vtl_set`.
    pub vtl_permission_set: VtlPermissionSet,
    /// First guest physical page number to accept.
    pub gpa_page_base: u64,
}
const_assert!(size_of::<AcceptGpaPages>() == 0x18);

/// Attributes for an unaccept-GPA-pages operation.
// NOTE(review): unlike `AcceptPagesAttributes`, this derives no zerocopy
// traits — confirm whether that asymmetry is intentional.
#[bitfield(u32)]
pub struct UnacceptPagesAttributes {
    /// Set of VTLs for which `vtl_permission_set` applies.
    #[bits(3)]
    pub vtl_set: u32,
    #[bits(29)]
    _reserved: u32,
}

/// Input for the unaccept-GPA-pages hypercall.
// NOTE(review): also lacks the Copy/Clone/zerocopy derives that
// `AcceptGpaPages` has — confirm whether that is intentional.
#[repr(C)]
pub struct UnacceptGpaPages {
    /// Target partition ID.
    pub partition_id: u64,
    /// Attributes of the unaccept operation.
    pub page_attributes: UnacceptPagesAttributes,
    /// Per-VTL permissions expected for the VTLs in `vtl_set`.
    pub vtl_permission_set: VtlPermissionSet,
    /// First guest physical page number to unaccept.
    pub gpa_page_base: u64,
}
const_assert!(size_of::<UnacceptGpaPages>() == 0x18);
1732
/// Requested host visibility for a modify-visibility operation.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifyHostVisibility {
    /// New visibility to apply.
    #[bits(2)]
    pub host_visibility: HostVisibilityType,
    #[bits(30)]
    _reserved: u32,
}

/// Input header for the modify-sparse-page-visibility hypercall; the GPA page
/// list follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifySparsePageVisibility {
    /// Target partition ID.
    pub partition_id: u64,
    /// Visibility to apply to the listed pages.
    pub host_visibility: ModifyHostVisibility,
    /// Reserved; must be zero.
    pub reserved: u32,
}

/// Input header for the query-sparse-page-visibility hypercall; the GPA page
/// list follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct QuerySparsePageVisibility {
    /// Target partition ID.
    pub partition_id: u64,
}

/// Size in bytes of the caller-supplied report data for a VBS attestation
/// report.
pub const VBS_VM_REPORT_DATA_SIZE: usize = 64;
/// Maximum size in bytes of a VBS attestation report.
pub const VBS_VM_MAX_REPORT_SIZE: usize = 2048;

/// Input for the VBS VM attestation-report hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VbsVmCallReport {
    /// Caller-chosen data bound into the report.
    pub report_data: [u8; VBS_VM_REPORT_DATA_SIZE],
}

/// Output of the VBS VM attestation-report hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VbsVmCallReportOutput {
    /// The attestation report, up to [`VBS_VM_MAX_REPORT_SIZE`] bytes.
    pub report: [u8; VBS_VM_MAX_REPORT_SIZE],
}

/// Feature flags for enabling a partition VTL.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnablePartitionVtlFlags {
    /// Enable mode-based execution control for the VTL.
    pub enable_mbec: bool,
    /// Enable supervisor shadow stacks for the VTL.
    pub enable_supervisor_shadow_stack: bool,
    /// Enable hardware HVPT for the VTL.
    pub enable_hardware_hvpt: bool,
    /// Reserved; must be zero.
    #[bits(5)]
    pub reserved: u8,
}

/// Input for the `HvCallEnablePartitionVtl` hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnablePartitionVtl {
    /// Target partition ID.
    pub partition_id: u64,
    /// VTL being enabled.
    pub target_vtl: u8,
    /// Optional features to enable for the VTL.
    pub flags: EnablePartitionVtlFlags,
    /// Reserved; must be zero.
    pub reserved_z0: u16,
    /// Reserved; must be zero.
    pub reserved_z1: u32,
}

/// Input header for the flush-virtual-address-space hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct FlushVirtualAddressSpace {
    /// Address space (e.g. CR3 value) to flush, unless flags say otherwise.
    pub address_space: u64,
    /// Flush behavior controls.
    pub flags: HvFlushFlags,
    /// Bitmask of target processors.
    pub processor_mask: u64,
}

/// Input header for the extended flush-virtual-address-space hypercall, which
/// targets a sparse VP set; the VP set banks follow this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct FlushVirtualAddressSpaceEx {
    /// Address space (e.g. CR3 value) to flush, unless flags say otherwise.
    pub address_space: u64,
    /// Flush behavior controls.
    pub flags: HvFlushFlags,
    /// Format of the trailing VP set.
    pub vp_set_format: u64,
    /// Bitmask of which VP set banks are present.
    pub vp_set_valid_banks_mask: u64,
}

/// Input header for the pin/unpin-GPA-page-ranges hypercalls; the page range
/// list follows this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct PinUnpinGpaPageRangesHeader {
    /// Reserved; must be zero.
    pub reserved: u64,
}

/// Input for the send-synthetic-cluster-IPI hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct SendSyntheticClusterIpi {
    /// Interrupt vector to deliver.
    pub vector: u32,
    /// VTL to target.
    pub target_vtl: HvInputVtl,
    /// Delivery flags.
    pub flags: u8,
    /// Reserved; must be zero.
    pub reserved: u16,
    /// Bitmask of target processors.
    pub processor_mask: u64,
}

/// Input header for the extended send-synthetic-cluster-IPI hypercall, which
/// targets a sparse VP set; the VP set banks follow this header.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct SendSyntheticClusterIpiEx {
    /// Interrupt vector to deliver.
    pub vector: u32,
    /// VTL to target.
    pub target_vtl: HvInputVtl,
    /// Delivery flags.
    pub flags: u8,
    /// Reserved; must be zero.
    pub reserved: u16,
    /// Format of the trailing VP set.
    pub vp_set_format: u64,
    /// Bitmask of which VP set banks are present.
    pub vp_set_valid_banks_mask: u64,
}

/// Behavior controls for the flush-virtual-address hypercalls.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvFlushFlags {
    /// Flush on all processors, ignoring the processor mask/set.
    pub all_processors: bool,
    /// Flush all address spaces, ignoring `address_space`.
    pub all_virtual_address_spaces: bool,
    /// Flush only non-global mappings.
    pub non_global_mappings_only: bool,
    /// Trailing GVA ranges use the extended range format.
    pub use_extended_range_format: bool,
    /// Restrict the flush to the VTLs selected below.
    pub use_target_vtl: bool,

    #[bits(3)]
    _reserved: u8,

    /// Flush VTL 0 (when `use_target_vtl` is set).
    pub target_vtl0: bool,
    /// Flush VTL 1 (when `use_target_vtl` is set).
    pub target_vtl1: bool,

    #[bits(54)]
    _reserved2: u64,
}
1855
/// A packed guest-virtual-address range as passed to flush hypercalls;
/// interpret via [`HvGvaRange::as_simple`] / [`HvGvaRange::as_extended`] /
/// [`HvGvaRange::as_extended_large_page`] depending on the flush flags.
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
#[repr(transparent)]
pub struct HvGvaRange(pub u64);
1859
1860 impl From<u64> for HvGvaRange {
1861 fn from(value: u64) -> Self {
1862 Self(value)
1863 }
1864 }
1865
1866 impl From<HvGvaRange> for u64 {
1867 fn from(value: HvGvaRange) -> Self {
1868 value.0
1869 }
1870 }
1871
1872 impl HvGvaRange {
1873 pub fn as_simple(self) -> HvGvaRangeSimple {
1874 HvGvaRangeSimple(self.0)
1875 }
1876
1877 pub fn as_extended(self) -> HvGvaRangeExtended {
1878 HvGvaRangeExtended(self.0)
1879 }
1880
1881 pub fn as_extended_large_page(self) -> HvGvaRangeExtendedLargePage {
1882 HvGvaRangeExtendedLargePage(self.0)
1883 }
1884 }
1885
/// Simple GVA range format: page count in the low 12 bits, page number above.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeSimple {
    /// Number of additional pages beyond the first (so 0 means one page).
    #[bits(12)]
    pub additional_pages: u64,
    /// Guest virtual page number of the first page.
    #[bits(52)]
    pub gva_page_number: u64,
}

/// Extended GVA range format with a large-page flag.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeExtended {
    /// Number of additional pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Set if the range refers to large pages; if so, use
    /// [`HvGvaRangeExtendedLargePage`] instead.
    pub large_page: bool,
    /// Guest virtual page number of the first page.
    #[bits(52)]
    pub gva_page_number: u64,
}

/// Extended GVA range format for large pages.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeExtendedLargePage {
    /// Number of additional large pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Must be set for this format.
    pub large_page: bool,
    /// Selects between the two large page sizes.
    pub page_size: bool,
    #[bits(8)]
    _reserved: u64,
    /// Guest virtual large-page number of the first page.
    #[bits(43)]
    pub gva_large_page_number: u64,
}
1928
/// A packed guest-physical-address range; interpret via
/// [`HvGpaRange::as_simple`] / [`HvGpaRange::as_extended`] /
/// [`HvGpaRange::as_extended_large_page`] depending on context.
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
#[repr(transparent)]
pub struct HvGpaRange(pub u64);
1932
1933 impl HvGpaRange {
1934 pub fn as_simple(self) -> HvGpaRangeSimple {
1935 HvGpaRangeSimple(self.0)
1936 }
1937
1938 pub fn as_extended(self) -> HvGpaRangeExtended {
1939 HvGpaRangeExtended(self.0)
1940 }
1941
1942 pub fn as_extended_large_page(self) -> HvGpaRangeExtendedLargePage {
1943 HvGpaRangeExtendedLargePage(self.0)
1944 }
1945 }
1946
/// Simple GPA range format: page count in the low 12 bits, page number above.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeSimple {
    /// Number of additional pages beyond the first (so 0 means one page).
    #[bits(12)]
    pub additional_pages: u64,
    /// Guest physical page number of the first page.
    #[bits(52)]
    pub gpa_page_number: u64,
}

/// Extended GPA range format with a large-page flag.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeExtended {
    /// Number of additional pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Set if the range refers to large pages; if so, use
    /// [`HvGpaRangeExtendedLargePage`] instead.
    pub large_page: bool,
    /// Guest physical page number of the first page.
    #[bits(52)]
    pub gpa_page_number: u64,
}

/// Extended GPA range format for large pages.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeExtendedLargePage {
    /// Number of additional large pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Must be set for this format.
    pub large_page: bool,
    /// Selects between the two large page sizes.
    pub page_size: bool,
    #[bits(8)]
    _reserved: u64,
    /// Guest physical large-page number of the first page.
    #[bits(43)]
    pub gpa_large_page_number: u64,
}
1989
/// Maximum data payload, in bytes, for the MMIO read/write hypercalls.
pub const HV_HYPERCALL_MMIO_MAX_DATA_LENGTH: usize = 64;

/// Input for the memory-mapped I/O read hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoRead {
    /// Guest physical address to read.
    pub gpa: u64,
    /// Number of bytes to read (at most
    /// [`HV_HYPERCALL_MMIO_MAX_DATA_LENGTH`]).
    pub access_width: u32,
    /// Reserved; must be zero.
    pub reserved_z0: u32,
}

/// Output of the memory-mapped I/O read hypercall; only the first
/// `access_width` bytes are meaningful.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoReadOutput {
    pub data: [u8; HV_HYPERCALL_MMIO_MAX_DATA_LENGTH],
}

/// Input for the memory-mapped I/O write hypercall; only the first
/// `access_width` bytes of `data` are written.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoWrite {
    /// Guest physical address to write.
    pub gpa: u64,
    /// Number of bytes to write (at most
    /// [`HV_HYPERCALL_MMIO_MAX_DATA_LENGTH`]).
    pub access_width: u32,
    /// Reserved; must be zero.
    pub reserved_z0: u32,
    pub data: [u8; HV_HYPERCALL_MMIO_MAX_DATA_LENGTH],
}
2014
/// Input for the restore-partition-time hypercall, used to resynchronize a
/// partition's reference time and TSC after save/restore or migration.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct RestorePartitionTime {
    /// Target partition ID.
    pub partition_id: u64,
    /// Reference TSC page sequence to restore.
    pub tsc_sequence: u32,
    /// Reserved; must be zero.
    pub reserved: u32,
    /// Reference time to restore, in 100ns units.
    pub reference_time_in_100_ns: u64,
    /// TSC value corresponding to the reference time.
    pub tsc: u64,
}
2024}
2025
/// Defines an architecture-specific register-name enum.
///
/// Each invocation produces an open enum containing the caller-supplied
/// arch-specific variants merged with the architecture-independent register
/// names listed in the macro body, plus lossless conversions to and from the
/// untyped [`HvRegisterName`].
macro_rules! registers {
    ($name:ident {
        $(
            $(#[$vattr:meta])*
            $variant:ident = $value:expr
        ),*
        $(,)?
    }) => {
        open_enum! {
            #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
            pub enum $name: u32 {
                #![expect(non_upper_case_globals)]
                // Arch-specific variants supplied by the caller.
                $($variant = $value,)*
                InstructionEmulationHints = 0x00000002,
                InternalActivityState = 0x00000004,

                // Guest crash notification registers.
                GuestCrashP0 = 0x00000210,
                GuestCrashP1 = 0x00000211,
                GuestCrashP2 = 0x00000212,
                GuestCrashP3 = 0x00000213,
                GuestCrashP4 = 0x00000214,
                GuestCrashCtl = 0x00000215,

                // Interrupt/event state registers.
                PendingInterruption = 0x00010002,
                InterruptState = 0x00010003,
                PendingEvent0 = 0x00010004,
                PendingEvent1 = 0x00010005,
                DeliverabilityNotifications = 0x00010006,

                GicrBaseGpa = 0x00063000,

                // Partition-wide and per-VP state registers.
                VpRuntime = 0x00090000,
                GuestOsId = 0x00090002,
                VpIndex = 0x00090003,
                TimeRefCount = 0x00090004,
                CpuManagementVersion = 0x00090007,
                VpAssistPage = 0x00090013,
                VpRootSignalCount = 0x00090014,
                ReferenceTsc = 0x00090017,
                VpConfig = 0x00090018,
                Ghcb = 0x00090019,
                ReferenceTscSequence = 0x0009001A,
                GuestSchedulerEvent = 0x0009001B,

                // Synthetic interrupt controller (SynIC) registers.
                Sint0 = 0x000A0000,
                Sint1 = 0x000A0001,
                Sint2 = 0x000A0002,
                Sint3 = 0x000A0003,
                Sint4 = 0x000A0004,
                Sint5 = 0x000A0005,
                Sint6 = 0x000A0006,
                Sint7 = 0x000A0007,
                Sint8 = 0x000A0008,
                Sint9 = 0x000A0009,
                Sint10 = 0x000A000A,
                Sint11 = 0x000A000B,
                Sint12 = 0x000A000C,
                Sint13 = 0x000A000D,
                Sint14 = 0x000A000E,
                Sint15 = 0x000A000F,
                Scontrol = 0x000A0010,
                Sversion = 0x000A0011,
                Sifp = 0x000A0012,
                Sipp = 0x000A0013,
                Eom = 0x000A0014,
                Sirbp = 0x000A0015,

                // Synthetic timer registers.
                Stimer0Config = 0x000B0000,
                Stimer0Count = 0x000B0001,
                Stimer1Config = 0x000B0002,
                Stimer1Count = 0x000B0003,
                Stimer2Config = 0x000B0004,
                Stimer2Count = 0x000B0005,
                Stimer3Config = 0x000B0006,
                Stimer3Count = 0x000B0007,
                StimeUnhaltedTimerConfig = 0x000B0100,
                StimeUnhaltedTimerCount = 0x000B0101,

                // Virtual Secure Mode (VSM) registers.
                VsmCodePageOffsets = 0x000D0002,
                VsmVpStatus = 0x000D0003,
                VsmPartitionStatus = 0x000D0004,
                VsmVina = 0x000D0005,
                VsmCapabilities = 0x000D0006,
                VsmPartitionConfig = 0x000D0007,
                GuestVsmPartitionConfig = 0x000D0008,
                VsmVpSecureConfigVtl0 = 0x000D0010,
                VsmVpSecureConfigVtl1 = 0x000D0011,
                VsmVpSecureConfigVtl2 = 0x000D0012,
                VsmVpSecureConfigVtl3 = 0x000D0013,
                VsmVpSecureConfigVtl4 = 0x000D0014,
                VsmVpSecureConfigVtl5 = 0x000D0015,
                VsmVpSecureConfigVtl6 = 0x000D0016,
                VsmVpSecureConfigVtl7 = 0x000D0017,
                VsmVpSecureConfigVtl8 = 0x000D0018,
                VsmVpSecureConfigVtl9 = 0x000D0019,
                VsmVpSecureConfigVtl10 = 0x000D001A,
                VsmVpSecureConfigVtl11 = 0x000D001B,
                VsmVpSecureConfigVtl12 = 0x000D001C,
                VsmVpSecureConfigVtl13 = 0x000D001D,
                VsmVpSecureConfigVtl14 = 0x000D001E,
                VsmVpWaitForTlbLock = 0x000D0020,
            }
        }

        impl From<HvRegisterName> for $name {
            fn from(name: HvRegisterName) -> Self {
                Self(name.0)
            }
        }

        impl From<$name> for HvRegisterName {
            fn from(name: $name) -> Self {
                Self(name.0)
            }
        }
    };
}
2144
/// An untyped hypervisor register identifier; typed per-architecture views are
/// provided by [`HvX64RegisterName`], [`HvArm64RegisterName`], and
/// [`HvAllArchRegisterName`].
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterName(pub u32);
2152
// Architecture-independent register names only (no arch-specific variants).
registers! {
    HvAllArchRegisterName {}
}
2157
2158impl From<HvAllArchRegisterName> for HvX64RegisterName {
2159 fn from(name: HvAllArchRegisterName) -> Self {
2160 Self(name.0)
2161 }
2162}
2163
2164impl From<HvAllArchRegisterName> for HvArm64RegisterName {
2165 fn from(name: HvAllArchRegisterName) -> Self {
2166 Self(name.0)
2167 }
2168}
2169
// x64 register names (plus the arch-independent set added by the macro).
registers! {
    HvX64RegisterName {
        // General-purpose registers.
        Rax = 0x00020000,
        Rcx = 0x00020001,
        Rdx = 0x00020002,
        Rbx = 0x00020003,
        Rsp = 0x00020004,
        Rbp = 0x00020005,
        Rsi = 0x00020006,
        Rdi = 0x00020007,
        R8 = 0x00020008,
        R9 = 0x00020009,
        R10 = 0x0002000a,
        R11 = 0x0002000b,
        R12 = 0x0002000c,
        R13 = 0x0002000d,
        R14 = 0x0002000e,
        R15 = 0x0002000f,
        Rip = 0x00020010,
        Rflags = 0x00020011,

        // Floating point / SIMD registers.
        Xmm0 = 0x00030000,
        Xmm1 = 0x00030001,
        Xmm2 = 0x00030002,
        Xmm3 = 0x00030003,
        Xmm4 = 0x00030004,
        Xmm5 = 0x00030005,
        Xmm6 = 0x00030006,
        Xmm7 = 0x00030007,
        Xmm8 = 0x00030008,
        Xmm9 = 0x00030009,
        Xmm10 = 0x0003000A,
        Xmm11 = 0x0003000B,
        Xmm12 = 0x0003000C,
        Xmm13 = 0x0003000D,
        Xmm14 = 0x0003000E,
        Xmm15 = 0x0003000F,
        FpMmx0 = 0x00030010,
        FpMmx1 = 0x00030011,
        FpMmx2 = 0x00030012,
        FpMmx3 = 0x00030013,
        FpMmx4 = 0x00030014,
        FpMmx5 = 0x00030015,
        FpMmx6 = 0x00030016,
        FpMmx7 = 0x00030017,
        FpControlStatus = 0x00030018,
        XmmControlStatus = 0x00030019,

        // Control registers.
        Cr0 = 0x00040000,
        Cr2 = 0x00040001,
        Cr3 = 0x00040002,
        Cr4 = 0x00040003,
        Cr8 = 0x00040004,
        Xfem = 0x00040005,
        IntermediateCr0 = 0x00041000,
        IntermediateCr3 = 0x00041002,
        IntermediateCr4 = 0x00041003,
        IntermediateCr8 = 0x00041004,
        // Debug registers.
        Dr0 = 0x00050000,
        Dr1 = 0x00050001,
        Dr2 = 0x00050002,
        Dr3 = 0x00050003,
        Dr6 = 0x00050004,
        Dr7 = 0x00050005,
        // Segment registers.
        Es = 0x00060000,
        Cs = 0x00060001,
        Ss = 0x00060002,
        Ds = 0x00060003,
        Fs = 0x00060004,
        Gs = 0x00060005,
        Ldtr = 0x00060006,
        Tr = 0x00060007,
        // Table registers.
        Idtr = 0x00070000,
        Gdtr = 0x00070001,
        // MSRs.
        Tsc = 0x00080000,
        Efer = 0x00080001,
        KernelGsBase = 0x00080002,
        ApicBase = 0x00080003,
        Pat = 0x00080004,
        SysenterCs = 0x00080005,
        SysenterEip = 0x00080006,
        SysenterEsp = 0x00080007,
        Star = 0x00080008,
        Lstar = 0x00080009,
        Cstar = 0x0008000a,
        Sfmask = 0x0008000b,
        InitialApicId = 0x0008000c,
        // MTRR MSRs.
        MsrMtrrCap = 0x0008000d,
        MsrMtrrDefType = 0x0008000e,
        MsrMtrrPhysBase0 = 0x00080010,
        MsrMtrrPhysBase1 = 0x00080011,
        MsrMtrrPhysBase2 = 0x00080012,
        MsrMtrrPhysBase3 = 0x00080013,
        MsrMtrrPhysBase4 = 0x00080014,
        MsrMtrrPhysBase5 = 0x00080015,
        MsrMtrrPhysBase6 = 0x00080016,
        MsrMtrrPhysBase7 = 0x00080017,
        MsrMtrrPhysBase8 = 0x00080018,
        MsrMtrrPhysBase9 = 0x00080019,
        MsrMtrrPhysBaseA = 0x0008001a,
        MsrMtrrPhysBaseB = 0x0008001b,
        MsrMtrrPhysBaseC = 0x0008001c,
        MsrMtrrPhysBaseD = 0x0008001d,
        MsrMtrrPhysBaseE = 0x0008001e,
        MsrMtrrPhysBaseF = 0x0008001f,
        MsrMtrrPhysMask0 = 0x00080040,
        MsrMtrrPhysMask1 = 0x00080041,
        MsrMtrrPhysMask2 = 0x00080042,
        MsrMtrrPhysMask3 = 0x00080043,
        MsrMtrrPhysMask4 = 0x00080044,
        MsrMtrrPhysMask5 = 0x00080045,
        MsrMtrrPhysMask6 = 0x00080046,
        MsrMtrrPhysMask7 = 0x00080047,
        MsrMtrrPhysMask8 = 0x00080048,
        MsrMtrrPhysMask9 = 0x00080049,
        MsrMtrrPhysMaskA = 0x0008004a,
        MsrMtrrPhysMaskB = 0x0008004b,
        MsrMtrrPhysMaskC = 0x0008004c,
        MsrMtrrPhysMaskD = 0x0008004d,
        MsrMtrrPhysMaskE = 0x0008004e,
        MsrMtrrPhysMaskF = 0x0008004f,
        MsrMtrrFix64k00000 = 0x00080070,
        MsrMtrrFix16k80000 = 0x00080071,
        MsrMtrrFix16kA0000 = 0x00080072,
        MsrMtrrFix4kC0000 = 0x00080073,
        MsrMtrrFix4kC8000 = 0x00080074,
        MsrMtrrFix4kD0000 = 0x00080075,
        MsrMtrrFix4kD8000 = 0x00080076,
        MsrMtrrFix4kE0000 = 0x00080077,
        MsrMtrrFix4kE8000 = 0x00080078,
        MsrMtrrFix4kF0000 = 0x00080079,
        MsrMtrrFix4kF8000 = 0x0008007a,

        TscAux = 0x0008007B,
        Bndcfgs = 0x0008007C,
        DebugCtl = 0x0008007D,
        MCount = 0x0008007E,
        ACount = 0x0008007F,

        SgxLaunchControl0 = 0x00080080,
        SgxLaunchControl1 = 0x00080081,
        SgxLaunchControl2 = 0x00080082,
        SgxLaunchControl3 = 0x00080083,
        SpecCtrl = 0x00080084,
        PredCmd = 0x00080085,
        VirtSpecCtrl = 0x00080086,
        TscVirtualOffset = 0x00080087,
        TsxCtrl = 0x00080088,
        MsrMcUpdatePatchLevel = 0x00080089,
        Available1 = 0x0008008A,
        Xss = 0x0008008B,
        UCet = 0x0008008C,
        SCet = 0x0008008D,
        Ssp = 0x0008008E,
        Pl0Ssp = 0x0008008F,
        Pl1Ssp = 0x00080090,
        Pl2Ssp = 0x00080091,
        Pl3Ssp = 0x00080092,
        InterruptSspTableAddr = 0x00080093,
        TscVirtualMultiplier = 0x00080094,
        TscDeadline = 0x00080095,
        TscAdjust = 0x00080096,
        Pasid = 0x00080097,
        UmwaitControl = 0x00080098,
        Xfd = 0x00080099,
        XfdErr = 0x0008009A,

        // Hypervisor-defined overlay pages.
        Hypercall = 0x00090001,
        RegisterPage = 0x0009001C,

        // Partition timer assist registers.
        EmulatedTimerPeriod = 0x00090030,
        EmulatedTimerControl = 0x00090031,
        PmTimerAssist = 0x00090032,

        // AMD SEV configuration.
        SevControl = 0x00090040,

        // Control-register intercept configuration.
        CrInterceptControl = 0x000E0000,
        CrInterceptCr0Mask = 0x000E0001,
        CrInterceptCr4Mask = 0x000E0002,
        CrInterceptIa32MiscEnableMask = 0x000E0003,
    }
}
2363
// ARM64 register names (plus the arch-independent set added by the macro).
registers! {
    HvArm64RegisterName {
        HypervisorVersion = 0x00000100,
        PrivilegesAndFeaturesInfo = 0x00000200,
        FeaturesInfo = 0x00000201,
        ImplementationLimitsInfo = 0x00000202,
        HardwareFeaturesInfo = 0x00000203,
        CpuManagementFeaturesInfo = 0x00000204,
        PasidFeaturesInfo = 0x00000205,
        SkipLevelFeaturesInfo = 0x00000206,
        NestedVirtFeaturesInfo = 0x00000207,
        IptFeaturesInfo = 0x00000208,
        IsolationConfiguration = 0x00000209,

        // General-purpose registers.
        X0 = 0x00020000,
        X1 = 0x00020001,
        X2 = 0x00020002,
        X3 = 0x00020003,
        X4 = 0x00020004,
        X5 = 0x00020005,
        X6 = 0x00020006,
        X7 = 0x00020007,
        X8 = 0x00020008,
        X9 = 0x00020009,
        X10 = 0x0002000A,
        X11 = 0x0002000B,
        X12 = 0x0002000C,
        X13 = 0x0002000D,
        X14 = 0x0002000E,
        X15 = 0x0002000F,
        X16 = 0x00020010,
        X17 = 0x00020011,
        X18 = 0x00020012,
        X19 = 0x00020013,
        X20 = 0x00020014,
        X21 = 0x00020015,
        X22 = 0x00020016,
        X23 = 0x00020017,
        X24 = 0x00020018,
        X25 = 0x00020019,
        X26 = 0x0002001A,
        X27 = 0x0002001B,
        X28 = 0x0002001C,
        XFp = 0x0002001D,
        XLr = 0x0002001E,
        // Alias for the stack pointer of the current exception level.
        XSp = 0x0002001F,
        XSpEl0 = 0x00020020,
        XSpElx = 0x00020021,
        XPc = 0x00020022,
        Cpsr = 0x00020023,
        SpsrEl2 = 0x00021002,

        // EL1 system registers.
        SctlrEl1 = 0x00040002,
        Ttbr0El1 = 0x00040005,
        Ttbr1El1 = 0x00040006,
        TcrEl1 = 0x00040007,
        EsrEl1 = 0x00040008,
        FarEl1 = 0x00040009,
        MairEl1 = 0x0004000b,
        VbarEl1 = 0x0004000c,
        ElrEl1 = 0x00040015,
    }
}
2427
/// A 16-byte-aligned 128-bit register value; narrower registers occupy the
/// low bits.
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterValue(pub AlignedU128);
2431
2432impl HvRegisterValue {
2433 pub fn as_u128(&self) -> u128 {
2434 self.0.into()
2435 }
2436
2437 pub fn as_u64(&self) -> u64 {
2438 self.as_u128() as u64
2439 }
2440
2441 pub fn as_u32(&self) -> u32 {
2442 self.as_u128() as u32
2443 }
2444
2445 pub fn as_u16(&self) -> u16 {
2446 self.as_u128() as u16
2447 }
2448
2449 pub fn as_u8(&self) -> u8 {
2450 self.as_u128() as u8
2451 }
2452
2453 pub fn as_table(&self) -> HvX64TableRegister {
2454 HvX64TableRegister::read_from_prefix(self.as_bytes())
2455 .unwrap()
2456 .0 }
2458
2459 pub fn as_segment(&self) -> HvX64SegmentRegister {
2460 HvX64SegmentRegister::read_from_prefix(self.as_bytes())
2461 .unwrap()
2462 .0 }
2464}
2465
2466impl From<u8> for HvRegisterValue {
2467 fn from(val: u8) -> Self {
2468 (val as u128).into()
2469 }
2470}
2471
2472impl From<u16> for HvRegisterValue {
2473 fn from(val: u16) -> Self {
2474 (val as u128).into()
2475 }
2476}
2477
2478impl From<u32> for HvRegisterValue {
2479 fn from(val: u32) -> Self {
2480 (val as u128).into()
2481 }
2482}
2483
2484impl From<u64> for HvRegisterValue {
2485 fn from(val: u64) -> Self {
2486 (val as u128).into()
2487 }
2488}
2489
2490impl From<u128> for HvRegisterValue {
2491 fn from(val: u128) -> Self {
2492 Self(val.into())
2493 }
2494}
2495
2496#[repr(C)]
2497#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
2498pub struct HvX64TableRegister {
2499 pub pad: [u16; 3],
2500 pub limit: u16,
2501 pub base: u64,
2502}
2503
2504impl From<HvX64TableRegister> for HvRegisterValue {
2505 fn from(val: HvX64TableRegister) -> Self {
2506 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2508}
2509
2510impl From<HvRegisterValue> for HvX64TableRegister {
2511 fn from(val: HvRegisterValue) -> Self {
2512 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2514}
2515
2516#[repr(C)]
2517#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
2518pub struct HvX64SegmentRegister {
2519 pub base: u64,
2520 pub limit: u32,
2521 pub selector: u16,
2522 pub attributes: u16,
2523}
2524
2525impl From<HvX64SegmentRegister> for HvRegisterValue {
2526 fn from(val: HvX64SegmentRegister) -> Self {
2527 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2529}
2530
2531impl From<HvRegisterValue> for HvX64SegmentRegister {
2532 fn from(val: HvRegisterValue) -> Self {
2533 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2535}
2536
/// The deliverability notifications register: the guest sets bits to request a
/// notification message when the corresponding event becomes deliverable.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes, PartialEq, Eq)]
pub struct HvDeliverabilityNotificationsRegister {
    pub nmi_notification: bool,
    pub interrupt_notification: bool,
    // Notify when an interrupt of at least this priority is deliverable.
    #[bits(4)]
    pub interrupt_priority: u8,
    #[bits(42)]
    pub reserved: u64,
    // One bit per SINT to be notified about.
    pub sints: u16,
}
2552
open_enum! {
    // Why a higher VTL was entered, reported in the VP VTL control area.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvVtlEntryReason: u32 {
        // This reason is reserved and is not used.
        RESERVED = 0,

        // Indicates entry due to a VTL call from a lower VTL.
        VTL_CALL = 1,

        // Indicates entry due to an interrupt targeted to the VTL.
        INTERRUPT = 2,

        // Indicates entry due to an intercept delivered via the intercept page.
        INTERCEPT = 3,
    }
}
2569
/// Per-VP VTL control area describing why the current VTL was entered and
/// carrying register state exchanged across VTL transitions.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpVtlControl {
    // The hypervisor updates the entry reason with an indication as to why the
    // VTL was entered on the virtual processor.
    pub entry_reason: HvVtlEntryReason,

    // Virtual Interrupt Notification Assist status for this VTL.
    pub vina_status: u8,
    pub reserved_z0: u8,
    pub reserved_z1: u16,

    // Registers exchanged across the VTL transition
    // (presumably rax/rcx on x64, x0/x1 on ARM64 — confirm with the TLFS).
    pub registers: [u64; 2],
}
2590
/// The VSM Virtual Interrupt Notification Assist register.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterVsmVina {
    pub vector: u8,
    pub enabled: bool,
    pub auto_reset: bool,
    pub auto_eoi: bool,
    #[bits(53)]
    pub reserved: u64,
}
2601
/// The virtual processor assist page layout shared between the guest and the
/// hypervisor.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpAssistPage {
    // APIC assist for optimized EOI processing.
    pub apic_assist: u32,
    pub reserved_z0: u32,

    // VP-VTL control information
    pub vtl_control: HvVpVtlControl,

    pub nested_enlightenments_control: u64,
    pub enlighten_vm_entry: u8,
    pub reserved_z1: [u8; 7],
    pub current_nested_vmcs: u64,
    pub synthetic_time_unhalted_timer_expired: u8,
    pub reserved_z2: [u8; 7],
    pub virtualization_fault_information: [u8; 40],
    pub reserved_z3: u64,
    // The intercept message delivered via the intercept page mechanism.
    pub intercept_message: HvMessage,
    pub vtl_return_actions: [u8; 256],
}
2623
/// A "signal event" VTL return action queued in the VP assist page's
/// `vtl_return_actions` buffer.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpAssistPageActionSignalEvent {
    pub action_type: u64,
    pub target_vp: u32,
    pub target_vtl: u8,
    pub target_sint: u8,
    pub flag_number: u16,
}
2633
open_enum! {
    // The access type that triggered an intercept (read/write/execute).
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvInterceptAccessType: u8 {
        READ = 0,
        WRITE = 1,
        EXECUTE = 2,
    }
}
2642
/// A snapshot of x64 VP execution state included in every intercept message
/// header.
#[bitfield(u16)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VpExecutionState {
    #[bits(2)]
    pub cpl: u8,
    pub cr0_pe: bool,
    pub cr0_am: bool,
    pub efer_lma: bool,
    pub debug_active: bool,
    pub interruption_pending: bool,
    // The VTL the processor was running in when the intercept occurred.
    #[bits(4)]
    pub vtl: u8,
    pub enclave_mode: bool,
    pub interrupt_shadow: bool,
    pub virtualization_fault_active: bool,
    #[bits(2)]
    pub reserved: u8,
}
2661
/// A snapshot of ARM64 VP execution state included in every intercept message
/// header.
#[bitfield(u16)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64VpExecutionState {
    #[bits(2)]
    pub cpl: u8,
    pub debug_active: bool,
    pub interruption_pending: bool,
    // The VTL the processor was running in when the intercept occurred.
    #[bits(4)]
    pub vtl: u8,
    pub virtualization_fault_active: bool,
    #[bits(7)]
    pub reserved: u8,
}
2675
/// Common header carried at the start of every x64 intercept message.
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterceptMessageHeader {
    pub vp_index: u32,
    // Low nibble: instruction length; high nibble: CR8. See the accessors
    // below.
    pub instruction_length_and_cr8: u8,
    pub intercept_access_type: HvInterceptAccessType,
    pub execution_state: HvX64VpExecutionState,
    pub cs_segment: HvX64SegmentRegister,
    pub rip: u64,
    pub rflags: u64,
}

impl MessagePayload for HvX64InterceptMessageHeader {}

impl HvX64InterceptMessageHeader {
    /// The length of the intercepted instruction (low 4 bits of the packed
    /// byte).
    pub fn instruction_len(&self) -> u8 {
        self.instruction_length_and_cr8 & 0xf
    }

    /// The CR8 value at the time of intercept (high 4 bits of the packed
    /// byte).
    pub fn cr8(&self) -> u8 {
        self.instruction_length_and_cr8 >> 4
    }
}
2699
/// Common header carried at the start of every ARM64 intercept message.
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64InterceptMessageHeader {
    pub vp_index: u32,
    pub instruction_length: u8,
    pub intercept_access_type: HvInterceptAccessType,
    pub execution_state: HvArm64VpExecutionState,
    pub pc: u64,
    // NOTE(review): ARM64 documentation calls this register CPSR; the field is
    // spelled `cspr` here — confirm before renaming, since callers depend on it.
    pub cspr: u64,
}
const_assert!(size_of::<HvArm64InterceptMessageHeader>() == 0x18);

impl MessagePayload for HvArm64InterceptMessageHeader {}
2713
2714#[repr(transparent)]
2715#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
2716pub struct HvX64IoPortAccessInfo(pub u8);
2717
2718impl HvX64IoPortAccessInfo {
2719 pub fn new(access_size: u8, string_op: bool, rep_prefix: bool) -> Self {
2720 let mut info = access_size & 0x7;
2721
2722 if string_op {
2723 info |= 0x8;
2724 }
2725
2726 if rep_prefix {
2727 info |= 0x10;
2728 }
2729
2730 Self(info)
2731 }
2732
2733 pub fn access_size(&self) -> u8 {
2734 self.0 & 0x7
2735 }
2736
2737 pub fn string_op(&self) -> bool {
2738 self.0 & 0x8 != 0
2739 }
2740
2741 pub fn rep_prefix(&self) -> bool {
2742 self.0 & 0x10 != 0
2743 }
2744}
2745
/// Intercept message delivered for an x64 I/O port access.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64IoPortInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub port_number: u16,
    pub access_info: HvX64IoPortAccessInfo,
    pub instruction_byte_count: u8,
    pub reserved: u32,
    pub rax: u64,
    pub instruction_bytes: [u8; 16],
    // Segment and GPR state needed to emulate string I/O instructions.
    pub ds_segment: HvX64SegmentRegister,
    pub es_segment: HvX64SegmentRegister,
    pub rcx: u64,
    pub rsi: u64,
    pub rdi: u64,
}

impl MessagePayload for HvX64IoPortInterceptMessage {}
2764
/// Validity and attribute flags for an x64 memory intercept.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MemoryAccessInfo {
    pub gva_valid: bool,
    pub gva_gpa_valid: bool,
    pub hypercall_output_pending: bool,
    pub tlb_locked: bool,
    pub supervisor_shadow_stack: bool,
    #[bits(3)]
    pub reserved1: u8,
}
2776
/// Validity flags for an ARM64 memory intercept.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64MemoryAccessInfo {
    pub gva_valid: bool,
    pub gva_gpa_valid: bool,
    pub hypercall_output_pending: bool,
    #[bits(5)]
    pub reserved1: u8,
}
2786
open_enum! {
    // Memory cache type used for a guest memory access. The numeric values
    // match the x86 memory type (PAT/MTRR) encodings.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvCacheType: u32 {
        #![expect(non_upper_case_globals)]
        HvCacheTypeUncached = 0,
        HvCacheTypeWriteCombining = 1,
        HvCacheTypeWriteThrough = 4,
        HvCacheTypeWriteProtected = 5,
        HvCacheTypeWriteBack = 6,
    }
}
2798
/// Intercept message delivered for an x64 guest memory access (e.g. MMIO or a
/// GPA protection violation).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MemoryInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub cache_type: HvCacheType,
    pub instruction_byte_count: u8,
    pub memory_access_info: HvX64MemoryAccessInfo,
    pub tpr_priority: u8,
    pub reserved: u8,
    // Only meaningful when the corresponding `memory_access_info` bits are set.
    pub guest_virtual_address: u64,
    pub guest_physical_address: u64,
    pub instruction_bytes: [u8; 16],
}

impl MessagePayload for HvX64MemoryInterceptMessage {}
const_assert!(size_of::<HvX64MemoryInterceptMessage>() == 0x50);
2815
/// Intercept message delivered for an ARM64 guest memory access.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64MemoryInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub cache_type: HvCacheType,
    pub instruction_byte_count: u8,
    pub memory_access_info: HvArm64MemoryAccessInfo,
    pub reserved1: u16,
    pub instruction_bytes: [u8; 4],
    pub reserved2: u32,
    pub guest_virtual_address: u64,
    pub guest_physical_address: u64,
    // ESR-style fault syndrome describing the access.
    pub syndrome: u64,
}

impl MessagePayload for HvArm64MemoryInterceptMessage {}
const_assert!(size_of::<HvArm64MemoryInterceptMessage>() == 0x40);
2833
/// Intercept message delivered for an ARM64 MMIO access, carrying the data
/// bytes directly.
#[repr(C)]
#[derive(Debug, FromBytes, IntoBytes, Immutable, KnownLayout)]
pub struct HvArm64MmioInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub guest_physical_address: u64,
    pub access_size: u32,
    // Data read or to be written; only the first `access_size` bytes are valid.
    pub data: [u8; 32],
    pub padding: u32,
}

impl MessagePayload for HvArm64MmioInterceptMessage {}
const_assert!(size_of::<HvArm64MmioInterceptMessage>() == 0x48);
2846
/// Intercept message delivered for an x64 MSR access (rdmsr/wrmsr).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MsrInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub msr_number: u32,
    pub reserved: u32,
    // edx:eax form the 64-bit MSR value for writes.
    pub rdx: u64,
    pub rax: u64,
}

impl MessagePayload for HvX64MsrInterceptMessage {}
2858
/// Intercept message delivered when a SIPI is sent to a virtual processor.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64SipiInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub target_vp_index: u32,
    pub vector: u32,
}

impl MessagePayload for HvX64SipiInterceptMessage {}
2868
/// Message indicating which previously-blocked x64 SINTs are now deliverable.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64SynicSintDeliverableMessage {
    pub header: HvX64InterceptMessageHeader,
    // Bitmask of deliverable SINTs.
    pub deliverable_sints: u16,
    pub rsvd1: u16,
    pub rsvd2: u32,
}

impl MessagePayload for HvX64SynicSintDeliverableMessage {}
2879
/// Message indicating which previously-blocked ARM64 SINTs are now deliverable.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64SynicSintDeliverableMessage {
    pub header: HvArm64InterceptMessageHeader,
    // Bitmask of deliverable SINTs.
    pub deliverable_sints: u16,
    pub rsvd1: u16,
    pub rsvd2: u32,
}

impl MessagePayload for HvArm64SynicSintDeliverableMessage {}
2890
/// Message indicating that a previously-requested interruption window is now
/// open and an event of `deliverable_type` can be injected.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterruptionDeliverableMessage {
    pub header: HvX64InterceptMessageHeader,
    pub deliverable_type: HvX64PendingInterruptionType,
    pub rsvd: [u8; 3],
    pub rsvd2: u32,
}

impl MessagePayload for HvX64InterruptionDeliverableMessage {}
2901
open_enum! {
    // The type of a pending x64 interruption/event. Values match the hardware
    // event-injection type encodings.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvX64PendingInterruptionType: u8 {
        HV_X64_PENDING_INTERRUPT = 0,
        HV_X64_PENDING_NMI = 2,
        HV_X64_PENDING_EXCEPTION = 3,
        HV_X64_PENDING_SOFTWARE_INTERRUPT = 4,
        HV_X64_PENDING_PRIVILEGED_SOFTWARE_EXCEPTION = 5,
        HV_X64_PENDING_SOFTWARE_EXCEPTION = 6,
    }
}
2913
/// Intercept message delivered for an x64 hypercall, carrying the register
/// state used by the hypercall calling convention.
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64HypercallInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub rax: u64,
    pub rbx: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub r8: u64,
    pub rsi: u64,
    pub rdi: u64,
    // XMM0-XMM5, used by the fast hypercall register convention.
    pub xmm_registers: [AlignedU128; 6],
    pub flags: HvHypercallInterceptMessageFlags,
    pub rsvd2: [u32; 3],
}

impl MessagePayload for HvX64HypercallInterceptMessage {}
2931
/// Intercept message delivered for an ARM64 hypercall (HVC).
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64HypercallInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    // The HVC instruction's immediate value.
    pub immediate: u16,
    pub reserved: u16,
    pub flags: HvHypercallInterceptMessageFlags,
    // X0-X17 at the time of the hypercall.
    pub x: [u64; 18],
}

impl MessagePayload for HvArm64HypercallInterceptMessage {}
2943
/// Flags shared by the x64 and ARM64 hypercall intercept messages.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvHypercallInterceptMessageFlags {
    pub is_isolated: bool,
    #[bits(31)]
    _reserved: u32,
}
2951
/// Intercept message delivered for a CPUID instruction, including the result
/// the hypervisor would have provided by default.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64CpuidInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    // CPUID inputs: leaf in eax, subleaf in ecx.
    pub rax: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub rbx: u64,
    // The result the hypervisor would return if the intercept is not handled.
    pub default_result_rax: u64,
    pub default_result_rcx: u64,
    pub default_result_rdx: u64,
    pub default_result_rbx: u64,
}

impl MessagePayload for HvX64CpuidInterceptMessage {}
2967
/// Flags describing an intercepted x64 exception.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ExceptionInfo {
    pub error_code_valid: bool,
    pub software_exception: bool,
    #[bits(6)]
    reserved: u8,
}
2976
/// Intercept message delivered for an x64 exception, with a full GPR snapshot
/// for emulation.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ExceptionInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub vector: u16,
    pub exception_info: HvX64ExceptionInfo,
    pub instruction_byte_count: u8,
    // Only valid when `exception_info.error_code_valid()` is set.
    pub error_code: u32,
    pub exception_parameter: u64,
    pub reserved: u64,
    pub instruction_bytes: [u8; 16],
    pub ds_segment: HvX64SegmentRegister,
    pub ss_segment: HvX64SegmentRegister,
    pub rax: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub rbx: u64,
    pub rsp: u64,
    pub rbp: u64,
    pub rsi: u64,
    pub rdi: u64,
    pub r8: u64,
    pub r9: u64,
    pub r10: u64,
    pub r11: u64,
    pub r12: u64,
    pub r13: u64,
    pub r14: u64,
    pub r15: u64,
}

impl MessagePayload for HvX64ExceptionInterceptMessage {}
3009
/// Message indicating a virtual processor's register state became invalid.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInvalidVpRegisterMessage {
    pub vp_index: u32,
    pub reserved: u32,
}

impl MessagePayload for HvInvalidVpRegisterMessage {}
3018
/// Message delivered when a guest performs an EOI on a level-triggered APIC
/// interrupt.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ApicEoiMessage {
    pub vp_index: u32,
    pub interrupt_vector: u32,
}

impl MessagePayload for HvX64ApicEoiMessage {}
3027
/// Message delivered when the VP hits an unrecoverable exception (e.g. triple
/// fault).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64UnrecoverableExceptionMessage {
    pub header: HvX64InterceptMessageHeader,
}

impl MessagePayload for HvX64UnrecoverableExceptionMessage {}
3035
/// Message delivered when the VP executes HLT.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64HaltMessage {
    pub header: HvX64InterceptMessageHeader,
}

impl MessagePayload for HvX64HaltMessage {}
3043
/// Message delivered when an ARM64 guest requests a reset (e.g. via PSCI).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64ResetInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub reset_type: HvArm64ResetType,
    pub reset_code: u32,
}

impl MessagePayload for HvArm64ResetInterceptMessage {}
3053
open_enum! {
    // The kind of reset requested by an ARM64 guest.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvArm64ResetType: u32 {
        POWER_OFF = 0,
        REBOOT = 1,
        SYSTEM_RESET = 2,
        HIBERNATE = 3,
    }
}
3063
/// Flags for an x64 register-access intercept message.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterInterceptMessageFlags {
    pub is_memory_op: bool,
    #[bits(7)]
    _rsvd: u8,
}
3071
/// Intercept message delivered for an intercepted register access.
// NOTE(review): unlike the other message types here, this derives neither
// Debug/KnownLayout nor implements MessagePayload — confirm whether that is
// intentional.
#[repr(C)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub flags: HvX64RegisterInterceptMessageFlags,
    pub rsvd: u8,
    pub rsvd2: u16,
    pub register_name: HvX64RegisterName,
    pub access_info: HvX64RegisterAccessInfo,
}
3082
/// Access information for a register intercept: for a write, the 128-bit
/// source value.
#[repr(transparent)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterAccessInfo(u128);

impl HvX64RegisterAccessInfo {
    /// Wraps the value being written to the intercepted register.
    pub fn new_source_value(source_value: HvRegisterValue) -> Self {
        Self(source_value.as_u128())
    }
}
3092
open_enum! {
    // Interrupt delivery modes used in HvInterruptControl. x64 values match
    // the APIC delivery-mode encodings.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvInterruptType : u32 {
        #![expect(non_upper_case_globals)]
        HvArm64InterruptTypeFixed = 0x0000,
        HvX64InterruptTypeFixed = 0x0000,
        HvX64InterruptTypeLowestPriority = 0x0001,
        HvX64InterruptTypeSmi = 0x0002,
        HvX64InterruptTypeRemoteRead = 0x0003,
        HvX64InterruptTypeNmi = 0x0004,
        HvX64InterruptTypeInit = 0x0005,
        HvX64InterruptTypeSipi = 0x0006,
        HvX64InterruptTypeExtInt = 0x0007,
        HvX64InterruptTypeLocalInt0 = 0x0008,
        HvX64InterruptTypeLocalInt1 = 0x0009,
    }
}
3110
/// Interrupt control word used by interrupt-assertion hypercalls: the low 32
/// bits hold the raw interrupt type, the high bits architecture-specific
/// flags.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInterruptControl {
    // Raw value; use the typed `interrupt_type` accessors below.
    interrupt_type_value: u32,
    pub x86_level_triggered: bool,
    pub x86_logical_destination_mode: bool,
    pub arm64_asserted: bool,
    #[bits(29)]
    pub unused: u32,
}

impl HvInterruptControl {
    /// Returns the interrupt type as the open-enum wrapper.
    pub fn interrupt_type(&self) -> HvInterruptType {
        HvInterruptType(self.interrupt_type_value())
    }

    /// Sets the interrupt type in place.
    pub fn set_interrupt_type(&mut self, ty: HvInterruptType) {
        self.set_interrupt_type_value(ty.0)
    }

    /// Builder-style setter for the interrupt type.
    pub fn with_interrupt_type(self, ty: HvInterruptType) -> Self {
        self.with_interrupt_type_value(ty.0)
    }
}
3141
/// The VSM capabilities register, describing which Virtual Secure Mode
/// features the hypervisor supports.
#[bitfield(u64)]
pub struct HvRegisterVsmCapabilities {
    pub dr6_shared: bool,
    pub mbec_vtl_mask: u16,
    pub deny_lower_vtl_startup: bool,
    pub supervisor_shadow_stack: bool,
    pub hardware_hvpt_available: bool,
    pub software_hvpt_available: bool,
    #[bits(6)]
    pub hardware_hvpt_range_bits: u8,
    pub intercept_page_available: bool,
    pub return_action_available: bool,
    pub vtl0_alias_map_available: bool,
    pub intercept_not_present_available: bool,
    pub install_intercept_ex: bool,
    pub intercept_system_reset_available: bool,
    #[bits(1)]
    pub reserved1: u8,
    pub proxy_interrupt_redirect_available: bool,
    #[bits(29)]
    pub reserved2: u64,
}
3173
/// The per-VTL VSM partition configuration register, controlling VTL
/// protections and which events are intercepted into this VTL.
#[bitfield(u64)]
pub struct HvRegisterVsmPartitionConfig {
    pub enable_vtl_protection: bool,
    #[bits(4)]
    pub default_vtl_protection_mask: u8,
    pub zero_memory_on_reset: bool,
    pub deny_lower_vtl_startup: bool,
    pub intercept_acceptance: bool,
    pub intercept_enable_vtl_protection: bool,
    pub intercept_vp_startup: bool,
    pub intercept_cpuid_unimplemented: bool,
    pub intercept_unrecoverable_exception: bool,
    pub intercept_page: bool,
    pub intercept_restore_partition_time: bool,
    pub intercept_not_present: bool,
    pub intercept_system_reset: bool,
    #[bits(48)]
    pub reserved: u64,
}
3195
/// The (read-only) VSM partition status register.
#[bitfield(u64)]
pub struct HvRegisterVsmPartitionStatus {
    // Bitmask of VTLs that are enabled for the partition.
    #[bits(16)]
    pub enabled_vtl_set: u16,
    #[bits(4)]
    pub maximum_vtl: u8,
    // Bitmask of VTLs with Mode-Based Execution Control enabled.
    #[bits(16)]
    pub mbec_enabled_vtl_set: u16,
    #[bits(4)]
    pub supervisor_shadow_stack_enabled_vtl_set: u8,
    #[bits(24)]
    pub reserved: u64,
}
3209
/// The guest VSM partition configuration register.
#[bitfield(u64)]
pub struct HvRegisterGuestVsmPartitionConfig {
    #[bits(4)]
    pub maximum_vtl: u8,
    #[bits(60)]
    pub reserved: u64,
}
3217
/// The (read-only) per-VP VSM status register.
#[bitfield(u64)]
pub struct HvRegisterVsmVpStatus {
    #[bits(4)]
    pub active_vtl: u8,
    pub active_mbec_enabled: bool,
    #[bits(11)]
    pub reserved_mbz0: u16,
    // Bitmask of VTLs enabled on this VP.
    #[bits(16)]
    pub enabled_vtl_set: u16,
    #[bits(32)]
    pub reserved_mbz1: u32,
}
3230
/// Offsets within the VSM code page of the VTL call and return sequences.
#[bitfield(u64)]
pub struct HvRegisterVsmCodePageOffsets {
    #[bits(12)]
    pub call_offset: u16,
    #[bits(12)]
    pub return_offset: u16,
    #[bits(40)]
    pub reserved: u64,
}
3240
/// Saved state for one synthetic timer.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvStimerState {
    // Nonzero if a timer expiration message could not yet be delivered.
    pub undelivered_message_pending: u32,
    pub reserved: u32,
    pub config: u64,
    pub count: u64,
    pub adjustment: u64,
    pub undelivered_expiration_time: u64,
}
3251
/// Saved state for all four synthetic timers of a virtual processor.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvSyntheticTimersState {
    pub timers: [HvStimerState; 4],
    pub reserved: [u64; 5],
}
3258
/// The internal activity register, reporting why a VP is suspended.
#[bitfield(u64)]
pub struct HvInternalActivityRegister {
    pub startup_suspend: bool,
    pub halt_suspend: bool,
    pub idle_suspend: bool,
    #[bits(61)]
    pub reserved: u64,
}
3267
3268#[bitfield(u64)]
3269pub struct HvSynicSint {
3270 pub vector: u8,
3271 _reserved: u8,
3272 pub masked: bool,
3273 pub auto_eoi: bool,
3274 pub polling: bool,
3275 _reserved2: bool,
3276 pub proxy: bool,
3277 #[bits(43)]
3278 _reserved2: u64,
3279}
3280
/// The SCONTROL register, globally enabling or disabling the synthetic
/// interrupt controller.
#[bitfield(u64)]
pub struct HvSynicScontrol {
    pub enabled: bool,
    #[bits(63)]
    _reserved: u64,
}
3287
/// The SIMP/SIEFP register layout: an enable bit plus the page number of the
/// message or event-flags page.
#[bitfield(u64)]
pub struct HvSynicSimpSiefp {
    pub enabled: bool,
    #[bits(11)]
    _reserved: u64,
    #[bits(52)]
    pub base_gpn: u64,
}
3296
/// A synthetic timer configuration register.
#[bitfield(u64)]
pub struct HvSynicStimerConfig {
    pub enabled: bool,
    pub periodic: bool,
    pub lazy: bool,
    pub auto_enable: bool,
    // Vector for direct-mode delivery (when `direct_mode` is set).
    pub apic_vector: u8,
    pub direct_mode: bool,
    #[bits(3)]
    pub _reserved1: u8,
    // SINT through which expiration messages are delivered.
    #[bits(4)]
    pub sint: u8,
    #[bits(44)]
    pub _reserved2: u64,
}
3313
// Values for the 3-bit `event_type` field of the x64 pending-event registers.
pub const HV_X64_PENDING_EVENT_EXCEPTION: u8 = 0;
pub const HV_X64_PENDING_EVENT_MEMORY_INTERCEPT: u8 = 1;
pub const HV_X64_PENDING_EVENT_NESTED_MEMORY_INTERCEPT: u8 = 2;
pub const HV_X64_PENDING_EVENT_VIRTUALIZATION_FAULT: u8 = 3;
pub const HV_X64_PENDING_EVENT_HYPERCALL_OUTPUT: u8 = 4;
pub const HV_X64_PENDING_EVENT_EXT_INT: u8 = 5;
pub const HV_X64_PENDING_EVENT_SHADOW_IPT: u8 = 6;
3321
/// Pending-event register payload for `HV_X64_PENDING_EVENT_EXCEPTION`.
#[bitfield(u128)]
pub struct HvX64PendingExceptionEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    pub deliver_error_code: bool,
    #[bits(7)]
    pub reserved1: u8,
    pub vector: u16,
    pub error_code: u32,
    pub exception_parameter: u64,
}
3338
/// Pending-event register payload for `HV_X64_PENDING_EVENT_VIRTUALIZATION_FAULT`.
#[bitfield(u128)]
pub struct HvX64PendingVirtualizationFaultEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    pub reserved1: u8,
    pub parameter0: u16,
    pub code: u32,
    pub parameter1: u64,
}
3353
/// First byte of a pending memory-intercept event: the pending bit and event
/// type.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryInterceptPendingEventHeader {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    _reserved0: u8,
}
3364
/// Access flags for a pending memory-intercept event.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryInterceptAccessFlags {
    // Indicates that the `guest_linear_address` field is valid.
    pub guest_linear_address_valid: bool,
    // Indicates the memory intercept was caused by a GPA (not GVA) access.
    pub caused_by_gpa_access: bool,
    #[bits(6)]
    _reserved1: u8,
}
3377
/// Pending-event register payload for `HV_X64_PENDING_EVENT_MEMORY_INTERCEPT`.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryIntercept {
    pub event_header: HvX64PendingEventMemoryInterceptPendingEventHeader,
    // The VTL the intercept targets.
    pub target_vtl: u8,
    pub access_type: HvInterceptAccessType,
    pub access_flags: HvX64PendingEventMemoryInterceptAccessFlags,
    pub _reserved2: u32,
    pub guest_linear_address: u64,
    pub guest_physical_address: u64,
    pub _reserved3: u64,
}
const_assert!(size_of::<HvX64PendingEventMemoryIntercept>() == 0x20);
3397
/// Pending-event register payload for `HV_X64_PENDING_EVENT_HYPERCALL_OUTPUT`.
#[bitfield(u128)]
pub struct HvX64PendingHypercallOutputEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    // Whether the hypercall has been retired.
    pub retired: bool,

    #[bits(23)]
    pub reserved1: u32,

    // Size of the pending hypercall output, in bytes.
    pub output_size: u32,

    // Guest physical address where the output should be written.
    pub output_gpa: u64,
}
3421
/// Pending-event register payload for `HV_X64_PENDING_EVENT_EXT_INT`.
#[bitfield(u128)]
pub struct HvX64PendingExtIntEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,
    pub vector: u8,
    #[bits(48)]
    pub reserved1: u64,
    pub reserved2: u64,
}
3435
/// Pending-event register payload for `HV_X64_PENDING_EVENT_SHADOW_IPT`.
// NOTE(review): `event_type` is 4 bits here while the other pending-event
// layouts use 3 bits plus reserved — confirm against the TLFS.
#[bitfield(u128)]
pub struct HvX64PendingShadowIptEvent {
    pub event_pending: bool,
    #[bits(4)]
    pub event_type: u8,
    #[bits(59)]
    pub reserved0: u64,

    pub reserved1: u64,
}
3447
/// Generic view of the first pending-event register: the common header bits
/// plus the type-specific payload.
#[bitfield(u128)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventReg0 {
    pub event_pending: bool,
    // One of the HV_X64_PENDING_EVENT_* constants.
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved: u8,
    #[bits(120)]
    pub data: u128,
}
3459
/// The full 256-bit pending-event register pair.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEvent {
    pub reg_0: HvX64PendingEventReg0,
    pub reg_1: AlignedU128,
}
const_assert!(size_of::<HvX64PendingEvent>() == 0x20);
3467
3468impl From<HvX64PendingExceptionEvent> for HvX64PendingEvent {
3469 fn from(exception_event: HvX64PendingExceptionEvent) -> Self {
3470 HvX64PendingEvent {
3471 reg_0: HvX64PendingEventReg0::from(u128::from(exception_event)),
3472 reg_1: 0u128.into(),
3473 }
3474 }
3475}
3476
/// The pending-interruption register, describing an event awaiting injection.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingInterruptionRegister {
    pub interruption_pending: bool,
    // One of the HvX64PendingInterruptionType values.
    #[bits(3)]
    pub interruption_type: u8,
    pub deliver_error_code: bool,
    #[bits(4)]
    pub instruction_length: u8,
    pub nested_event: bool,
    #[bits(6)]
    pub reserved: u8,
    pub interruption_vector: u16,
    pub error_code: u32,
}
3492
/// The interrupt-state register (interrupt shadow and NMI masking).
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterruptStateRegister {
    pub interrupt_shadow: bool,
    pub nmi_masked: bool,
    #[bits(62)]
    pub reserved: u64,
}
3501
/// Hints provided to the guest's instruction emulator.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInstructionEmulatorHintsRegister {
    // Whether any secure VTL is enabled for the partition.
    pub partition_secure_vtl_enabled: bool,
    // Whether MBEC user-mode execute control is active.
    pub mbec_user_execute_control: bool,
    #[bits(62)]
    pub _padding: u64,
}
3513
open_enum! {
    // The type of a pending ARM64 event.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvAarch64PendingEventType: u8 {
        EXCEPTION = 0,
        SYNTHETIC_EXCEPTION = 1,
        HYPERCALL_OUTPUT = 2,
    }
}
3522
3523impl HvAarch64PendingEventType {
3525 const fn from_bits(val: u8) -> Self {
3526 HvAarch64PendingEventType(val)
3527 }
3528
3529 const fn into_bits(self) -> u8 {
3530 self.0
3531 }
3532}
3533
3534#[bitfield[u8]]
3535#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
3536pub struct HvAarch64PendingEventHeader {
3537 #[bits(1)]
3538 pub event_pending: bool,
3539 #[bits(3)]
3540 pub event_type: HvAarch64PendingEventType,
3541 #[bits(4)]
3542 pub reserved: u8,
3543}
3544
/// Pending-event payload for an ARM64 exception.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingExceptionEvent {
    pub header: HvAarch64PendingEventHeader,
    pub _padding: [u8; 7],
    // ESR-style syndrome for the exception.
    pub syndrome: u64,
    pub fault_address: u64,
}
3553
3554#[bitfield[u8]]
3555#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
3556pub struct HvAarch64PendingHypercallOutputEventFlags {
3557 #[bits(1)]
3558 pub retired: u8,
3559 #[bits(7)]
3560 pub reserved: u8,
3561}
3562
/// Pending-event payload for ARM64 hypercall output.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingHypercallOutputEvent {
    pub header: HvAarch64PendingEventHeader,
    pub flags: HvAarch64PendingHypercallOutputEventFlags,
    pub reserved: u16,
    pub output_size: u32,
    pub output_gpa: u64,
}
3572
/// Generic view of an ARM64 pending event: header plus type-specific payload
/// bytes.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingEvent {
    pub header: HvAarch64PendingEventHeader,
    pub event_data: [u8; 15],
    pub _padding: [u64; 2],
}
3580
/// Access permissions used when mapping guest physical pages.
#[bitfield(u32)]
#[derive(PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMapGpaFlags {
    pub readable: bool,
    pub writable: bool,
    pub kernel_executable: bool,
    pub user_executable: bool,
    pub supervisor_shadow_stack: bool,
    pub paging_writability: bool,
    pub verify_paging_writability: bool,
    #[bits(8)]
    _padding0: u32,
    pub adjustable: bool,
    #[bits(16)]
    _padding1: u32,
}

/// No access permitted.
pub const HV_MAP_GPA_PERMISSIONS_NONE: HvMapGpaFlags = HvMapGpaFlags::new();
/// Read, write, and execute (kernel and user) all permitted.
pub const HV_MAP_GPA_PERMISSIONS_ALL: HvMapGpaFlags = HvMapGpaFlags::new()
    .with_readable(true)
    .with_writable(true)
    .with_kernel_executable(true)
    .with_user_executable(true);
3605
/// The full monitor page layout used for monitored notifications
/// (4 groups x 32 monitored flags each).
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorPage {
    pub trigger_state: HvMonitorTriggerState,
    pub reserved1: u32,
    pub trigger_group: [HvMonitorTriggerGroup; 4],
    pub reserved2: [u64; 3],
    pub next_check_time: [[u32; 32]; 4],
    pub latency: [[u16; 32]; 4],
    pub reserved3: [u64; 32],
    pub parameter: [[HvMonitorParameter; 32]; 4],
    pub reserved4: [u8; 1984],
}
3619
/// The prefix of [`HvMonitorPage`] containing just the trigger state and
/// groups.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorPageSmall {
    pub trigger_state: HvMonitorTriggerState,
    pub reserved1: u32,
    pub trigger_group: [HvMonitorTriggerGroup; 4],
}
3627
/// One monitor trigger group: 32 pending bits and 32 armed bits.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorTriggerGroup {
    pub pending: u32,
    pub armed: u32,
}
3634
/// Connection/flag pair associated with one monitored flag.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorParameter {
    pub connection_id: u32,
    pub flag_number: u16,
    pub reserved: u16,
}
3642
/// Enable bits for the four monitor trigger groups.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorTriggerState {
    #[bits(4)]
    pub group_enable: u32,
    #[bits(28)]
    pub reserved: u32,
}
3651
/// Information about the emulated ACPI PM timer.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvPmTimerInfo {
    #[bits(16)]
    pub port: u16,
    // True when the timer counter is 24 bits wide (vs. 32).
    #[bits(1)]
    pub width_24: bool,
    #[bits(1)]
    pub enabled: bool,
    #[bits(14)]
    pub reserved1: u32,
    #[bits(32)]
    pub reserved2: u32,
}
3666
/// The SEV control register: enables encrypted state and locates the VMSA page.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterSevControl {
    pub enable_encrypted_state: bool,
    #[bits(11)]
    _rsvd1: u64,
    // Guest page number of the SEV VMSA.
    #[bits(52)]
    pub vmsa_gpa_page_number: u64,
}
3676
/// Register value controlling the reference TSC page overlay: an enable bit
/// plus the guest page number where the page is mapped.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterReferenceTsc {
    /// Enables the reference TSC page.
    pub enable: bool,
    #[bits(11)]
    pub reserved_p: u64,
    /// Guest page number of the reference TSC page.
    #[bits(52)]
    pub gpn: u64,
}
3686
3687#[repr(C)]
3688#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
3689pub struct HvReferenceTscPage {
3690 pub tsc_sequence: u32,
3691 pub reserved1: u32,
3692 pub tsc_scale: u64,
3693 pub tsc_offset: i64,
3694 pub timeline_bias: u64,
3695 pub tsc_multiplier: u64,
3696 pub reserved2: [u64; 507],
3697}
3698
/// `tsc_sequence` value indicating the reference TSC page contents are not
/// valid and must not be used.
pub const HV_REFERENCE_TSC_SEQUENCE_INVALID: u32 = 0;
3700
/// Flags accompanying a VMGEXIT intercept message.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageFlags {
    /// Set when the `ghcb_page` contents in the message are valid.
    pub ghcb_page_valid: bool,
    /// Set when the GHCB request could not be processed — TODO confirm exact
    /// semantics against the TLFS.
    pub ghcb_request_error: bool,
    #[bits(62)]
    _reserved: u64,
}
3709
/// The standard-usage portion of the GHCB page captured in a VMGEXIT
/// intercept message. The `sw_*` fields presumably mirror the identically
/// named fields of the AMD GHCB specification — confirm against that spec.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageGhcbPageStandard {
    /// GHCB protocol version negotiated by the guest.
    pub ghcb_protocol_version: u16,
    _reserved: [u16; 3],
    pub sw_exit_code: u64,
    pub sw_exit_info1: u64,
    pub sw_exit_info2: u64,
    pub sw_scratch: u64,
}
3720
/// GHCB page contents captured in a VMGEXIT intercept message.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageGhcbPage {
    /// GHCB usage value; `standard` applies for the standard usage.
    pub ghcb_usage: u32,
    _reserved: u32,
    /// Standard-usage fields of the GHCB page.
    pub standard: HvX64VmgexitInterceptMessageGhcbPageStandard,
}
3728
/// Intercept message delivered when a guest executes VMGEXIT.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessage {
    /// Common x64 intercept message header.
    pub header: HvX64InterceptMessageHeader,
    /// Value of the GHCB MSR at the time of the exit.
    pub ghcb_msr: u64,
    /// Message flags; `ghcb_page_valid` gates `ghcb_page`.
    pub flags: HvX64VmgexitInterceptMessageFlags,
    /// Captured GHCB page contents (valid only when flagged so).
    pub ghcb_page: HvX64VmgexitInterceptMessageGhcbPage,
}

impl MessagePayload for HvX64VmgexitInterceptMessage {}
3739
/// Register value controlling the VP assist page overlay: an enable bit plus
/// the guest page number where the page is mapped.
#[bitfield(u64)]
pub struct HvRegisterVpAssistPage {
    /// Enables the VP assist page.
    pub enabled: bool,
    #[bits(11)]
    _reserved: u64,
    /// Guest page number of the VP assist page.
    #[bits(52)]
    pub gpa_page_number: u64,
}
3748
/// Dirty bits for [`HvX64RegisterPage`], indicating which register groups in
/// the page have been modified.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterPageDirtyFlags {
    /// `gp_registers` modified.
    pub general_purpose: bool,
    /// `rip` modified.
    pub instruction_pointer: bool,
    /// `xmm` registers modified.
    pub xmm: bool,
    /// `segment` registers modified.
    pub segments: bool,
    /// `rflags` modified.
    pub flags: bool,
    #[bits(27)]
    reserved: u32,
}
3760
/// The x64 register page: a shared-memory overlay exposing commonly accessed
/// VP registers so they can be read/written without intercepts or hypercalls.
/// The exact field layout is ABI and must not change.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterPage {
    /// Layout version of the register page.
    pub version: u16,
    /// Nonzero when the page contents are valid — TODO confirm encoding.
    pub is_valid: u8,
    /// VTL this register page reflects.
    pub vtl: u8,
    /// Which register groups below have been modified.
    pub dirty: HvX64RegisterPageDirtyFlags,
    /// General-purpose registers; the index→register mapping is defined by
    /// the ABI (not visible here — confirm against the TLFS).
    pub gp_registers: [u64; 16],
    pub rip: u64,
    pub rflags: u64,
    pub reserved: u64,
    pub xmm: [u128; 6],
    /// Segment registers, in the ABI-defined order.
    pub segment: [u128; 6],
    // Misc. control registers (cannot be set via this interface).
    pub cr0: u64,
    pub cr3: u64,
    pub cr4: u64,
    pub cr8: u64,
    pub efer: u64,
    pub dr7: u64,
    pub pending_interruption: HvX64PendingInterruptionRegister,
    pub interrupt_state: HvX64InterruptStateRegister,
    pub instruction_emulation_hints: HvInstructionEmulatorHintsRegister,
    /// Pads the structure out to exactly one 4 KiB page.
    pub reserved_end: [u8; 3672],
}

// The register page overlay must be exactly one hypervisor page.
const _: () = assert!(size_of::<HvX64RegisterPage>() == HV_PAGE_SIZE_USIZE);
3788
/// Dirty bits for [`HvAarch64RegisterPage`], indicating which register groups
/// in the page have been modified.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64RegisterPageDirtyFlags {
    _unused: bool,
    /// `pc` modified.
    pub instruction_pointer: bool,
    /// `cpsr` modified.
    pub processor_state: bool,
    /// Control registers (`sctlr_el1`, `tcr_el1`) modified.
    pub control_registers: bool,
    #[bits(28)]
    reserved: u32,
}
3799
/// The AArch64 register page: shared-memory overlay exposing commonly
/// accessed VP registers. The exact field layout is ABI and must not change.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64RegisterPage {
    /// Layout version of the register page.
    pub version: u16,
    /// Nonzero when the page contents are valid — TODO confirm encoding.
    pub is_valid: u8,
    /// VTL this register page reflects.
    pub vtl: u8,
    /// Which register groups below have been modified.
    pub dirty: HvAarch64RegisterPageDirtyFlags,
    pub _rsvd: [u64; 33],
    /// Program counter.
    pub pc: u64,
    /// Current program status register.
    pub cpsr: u64,
    /// EL1 system control register.
    pub sctlr_el1: u64,
    /// EL1 translation control register.
    pub tcr_el1: u64,
    /// Pads the structure out to exactly one 4 KiB page.
    pub reserved_end: [u8; 3792],
}

// The register page overlay must be exactly one hypervisor page.
const _: () = assert!(size_of::<HvAarch64RegisterPage>() == HV_PAGE_SIZE_USIZE);
3821
/// VSM register value controlling whether writes wait for the TLB lock —
/// TODO confirm exact semantics against the TLFS.
#[bitfield(u64)]
pub struct HvRegisterVsmWpWaitForTlbLock {
    /// When set, wait for the TLB lock.
    pub wait: bool,
    #[bits(63)]
    _reserved: u64,
}
3828
/// Per-VP secure VTL configuration register (VSM).
#[bitfield(u64)]
pub struct HvRegisterVsmVpSecureVtlConfig {
    /// Mode-based execution control (MBEC) is enabled.
    pub mbec_enabled: bool,
    /// The TLB is locked.
    pub tlb_locked: bool,
    /// Supervisor shadow stacks are enabled.
    pub supervisor_shadow_stack_enabled: bool,
    /// Hardware HVPT is enabled — TODO confirm meaning against the TLFS.
    pub hardware_hvpt_enabled: bool,
    #[bits(60)]
    _reserved: u64,
}
3838
/// Control-register/MSR intercept control: each bit enables interception of
/// the named control-register or MSR access (mostly writes, plus a few
/// reads). Bit positions are ABI; do not reorder fields.
#[bitfield(u64)]
pub struct HvRegisterCrInterceptControl {
    pub cr0_write: bool,
    pub cr4_write: bool,
    pub xcr0_write: bool,
    pub ia32_misc_enable_read: bool,
    pub ia32_misc_enable_write: bool,
    pub msr_lstar_read: bool,
    pub msr_lstar_write: bool,
    pub msr_star_read: bool,
    pub msr_star_write: bool,
    pub msr_cstar_read: bool,
    pub msr_cstar_write: bool,
    pub apic_base_msr_read: bool,
    pub apic_base_msr_write: bool,
    pub msr_efer_read: bool,
    pub msr_efer_write: bool,
    pub gdtr_write: bool,
    pub idtr_write: bool,
    pub ldtr_write: bool,
    pub tr_write: bool,
    pub msr_sysenter_cs_write: bool,
    pub msr_sysenter_eip_write: bool,
    pub msr_sysenter_esp_write: bool,
    pub msr_sfmask_write: bool,
    pub msr_tsc_aux_write: bool,
    pub msr_sgx_launch_control_write: bool,
    pub msr_xss_write: bool,
    pub msr_scet_write: bool,
    pub msr_pls_ssp_write: bool,
    pub msr_interrupt_ssp_table_addr_write: bool,
    #[bits(35)]
    _rsvd_z: u64,
}