1#![expect(missing_docs)]
7#![forbid(unsafe_code)]
8#![no_std]
9
10pub mod vbs;
11
12use bitfield_struct::bitfield;
13use core::fmt::Debug;
14use core::mem::size_of;
15use open_enum::open_enum;
16use static_assertions::const_assert;
17use zerocopy::FromBytes;
18use zerocopy::FromZeros;
19use zerocopy::Immutable;
20use zerocopy::IntoBytes;
21use zerocopy::KnownLayout;
22
/// Size in bytes of a hypervisor page.
pub const HV_PAGE_SIZE: u64 = 4096;
/// [`HV_PAGE_SIZE`] as a `usize`, for buffer sizing and indexing.
pub const HV_PAGE_SIZE_USIZE: usize = 4096;
/// log2 of [`HV_PAGE_SIZE`].
pub const HV_PAGE_SHIFT: u64 = 12;

/// Partition ID value meaning "the calling partition".
pub const HV_PARTITION_ID_SELF: u64 = u64::MAX;
/// VP index value meaning "the calling virtual processor".
pub const HV_VP_INDEX_SELF: u32 = 0xfffffffe;

// CPUID leaves used to discover the hypervisor and query its capabilities.
pub const HV_CPUID_FUNCTION_VERSION_AND_FEATURES: u32 = 0x00000001;
pub const HV_CPUID_FUNCTION_HV_VENDOR_AND_MAX_FUNCTION: u32 = 0x40000000;
pub const HV_CPUID_FUNCTION_HV_INTERFACE: u32 = 0x40000001;
pub const HV_CPUID_FUNCTION_MS_HV_VERSION: u32 = 0x40000002;
pub const HV_CPUID_FUNCTION_MS_HV_FEATURES: u32 = 0x40000003;
pub const HV_CPUID_FUNCTION_MS_HV_ENLIGHTENMENT_INFORMATION: u32 = 0x40000004;
pub const HV_CPUID_FUNCTION_MS_HV_IMPLEMENTATION_LIMITS: u32 = 0x40000005;
pub const HV_CPUID_FUNCTION_MS_HV_HARDWARE_FEATURES: u32 = 0x40000006;
pub const HV_CPUID_FUNCTION_MS_HV_ISOLATION_CONFIGURATION: u32 = 0x4000000C;

// CPUID leaves exposed by the virtualization stack (not the hypervisor itself).
pub const VIRTUALIZATION_STACK_CPUID_VENDOR: u32 = 0x40000080;
pub const VIRTUALIZATION_STACK_CPUID_INTERFACE: u32 = 0x40000081;
pub const VIRTUALIZATION_STACK_CPUID_PROPERTIES: u32 = 0x40000082;

// Bit flags reported in EAX of the VS1 partition properties CPUID leaf.
pub const VS1_PARTITION_PROPERTIES_EAX_IS_PORTABLE: u32 = 0x000000001;
pub const VS1_PARTITION_PROPERTIES_EAX_DEBUG_DEVICE_PRESENT: u32 = 0x000000002;
pub const VS1_PARTITION_PROPERTIES_EAX_EXTENDED_IOAPIC_RTE: u32 = 0x000000004;
pub const VS1_PARTITION_PROPERTIES_EAX_CONFIDENTIAL_VMBUS_AVAILABLE: u32 = 0x000000008;

/// UID reported by the Microsoft hypervisor (ARM SMCCC-style vendor UID).
// NOTE(review): value assumed correct per the platform contract — cannot be
// derived from this file; verify against the hypervisor documentation.
pub const VENDOR_HYP_UID_MS_HYPERVISOR: [u32; 4] = [0x4d32ba58, 0xcd244764, 0x8eef6c75, 0x16597024];
58
/// The partition privilege mask: which hypervisor facilities this partition
/// may use. Low 32 bits cover synthetic MSR access; high 32 bits cover
/// hypercall and partition-management privileges. Field order defines the
/// bit layout, so do not reorder fields.
#[bitfield(u64)]
pub struct HvPartitionPrivilege {
    // Access to virtual MSRs (bits 0..=15).
    pub access_vp_runtime_msr: bool,
    pub access_partition_reference_counter: bool,
    pub access_synic_msrs: bool,
    pub access_synthetic_timer_msrs: bool,
    pub access_apic_msrs: bool,
    pub access_hypercall_msrs: bool,
    pub access_vp_index: bool,
    pub access_reset_msr: bool,
    pub access_stats_msr: bool,
    pub access_partition_reference_tsc: bool,
    pub access_guest_idle_msr: bool,
    pub access_frequency_msrs: bool,
    pub access_debug_msrs: bool,
    pub access_reenlightenment_ctrls: bool,
    pub access_root_scheduler_msr: bool,
    pub access_tsc_invariant_controls: bool,
    _reserved1: u16,

    // Access to hypercalls and partition management (bits 32..).
    pub create_partitions: bool,
    pub access_partition_id: bool,
    pub access_memory_pool: bool,
    pub adjust_message_buffers: bool,
    pub post_messages: bool,
    pub signal_events: bool,
    pub create_port: bool,
    pub connect_port: bool,
    pub access_stats: bool,
    #[bits(2)]
    _reserved2: u64,
    pub debugging: bool,
    pub cpu_management: bool,
    pub configure_profiler: bool,
    pub access_vp_exit_tracing: bool,
    pub enable_extended_gva_ranges_flush_va_list: bool,
    pub access_vsm: bool,
    pub access_vp_registers: bool,
    _unused_bit: bool,
    pub fast_hypercall_output: bool,
    pub enable_extended_hypercalls: bool,
    pub start_virtual_processor: bool,
    pub isolation: bool,
    #[bits(9)]
    _reserved3: u64,
}
107
open_enum! {
    /// The hardware isolation architecture of a partition, as reported via
    /// the isolation configuration CPUID leaf.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvPartitionIsolationType: u8 {
        NONE = 0,
        VBS = 1,
        SNP = 2,
        TDX = 3,
    }
}
117
118#[bitfield(u128)]
119#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
120pub struct HvFeatures {
121 #[bits(64)]
122 pub privileges: HvPartitionPrivilege,
123
124 #[bits(4)]
125 pub max_supported_cstate: u32,
126 pub hpet_needed_for_c3_power_state_deprecated: bool,
127 pub invariant_mperf_available: bool,
128 pub supervisor_shadow_stack_available: bool,
129 pub arch_pmu_available: bool,
130 pub exception_trap_intercept_available: bool,
131 #[bits(23)]
132 reserved: u32,
133
134 pub mwait_available_deprecated: bool,
135 pub guest_debugging_available: bool,
136 pub performance_monitors_available: bool,
137 pub cpu_dynamic_partitioning_available: bool,
138 pub xmm_registers_for_fast_hypercall_available: bool,
139 pub guest_idle_available: bool,
140 pub hypervisor_sleep_state_support_available: bool,
141 pub numa_distance_query_available: bool,
142 pub frequency_regs_available: bool,
143 pub synthetic_machine_check_available: bool,
144 pub guest_crash_regs_available: bool,
145 pub debug_regs_available: bool,
146 pub npiep1_available: bool,
147 pub disable_hypervisor_available: bool,
148 pub extended_gva_ranges_for_flush_virtual_address_list_available: bool,
149 pub fast_hypercall_output_available: bool,
150 pub svm_features_available: bool,
151 pub sint_polling_mode_available: bool,
152 pub hypercall_msr_lock_available: bool,
153 pub direct_synthetic_timers: bool,
154 pub register_pat_available: bool,
155 pub register_bndcfgs_available: bool,
156 pub watchdog_timer_available: bool,
157 pub synthetic_time_unhalted_timer_available: bool,
158 pub device_domains_available: bool, pub s1_device_domains_available: bool, pub lbr_available: bool,
161 pub ipt_available: bool,
162 pub cross_vtl_flush_available: bool,
163 pub idle_spec_ctrl_available: bool,
164 pub translate_gva_flags_available: bool,
165 pub apic_eoi_intercept_available: bool,
166}
167
impl HvFeatures {
    /// Reinterprets the four CPUID output registers (eax, ebx, ecx, edx)
    /// as an `HvFeatures` value.
    pub fn from_cpuid(cpuid: [u32; 4]) -> Self {
        zerocopy::transmute!(cpuid)
    }

    /// Converts this value back into the four CPUID output registers.
    pub fn into_cpuid(self) -> [u32; 4] {
        zerocopy::transmute!(self)
    }
}
177
/// The enlightenment information CPUID leaf output
/// (HV_CPUID_FUNCTION_MS_HV_ENLIGHTENMENT_INFORMATION): recommendations to
/// the guest about which enlightenments to use. Field order defines the
/// bit layout.
#[bitfield(u128)]
pub struct HvEnlightenmentInformation {
    pub use_hypercall_for_address_space_switch: bool,
    pub use_hypercall_for_local_flush: bool,
    pub use_hypercall_for_remote_flush_and_local_flush_entire: bool,
    pub use_apic_msrs: bool,
    pub use_hv_register_for_reset: bool,
    pub use_relaxed_timing: bool,
    pub use_dma_remapping_deprecated: bool,
    pub use_interrupt_remapping_deprecated: bool,
    pub use_x2_apic_msrs: bool,
    pub deprecate_auto_eoi: bool,
    pub use_synthetic_cluster_ipi: bool,
    pub use_ex_processor_masks: bool,
    pub nested: bool,
    pub use_int_for_mbec_system_calls: bool,
    pub use_vmcs_enlightenments: bool,
    pub use_synced_timeline: bool,
    pub core_scheduler_requested: bool,
    pub use_direct_local_flush_entire: bool,
    pub no_non_architectural_core_sharing: bool,
    pub use_x2_apic: bool,
    pub restore_time_on_resume: bool,
    pub use_hypercall_for_mmio_access: bool,
    pub use_gpa_pinning_hypercall: bool,
    pub wake_vps: bool,
    _reserved: u8,
    /// Recommended number of spin-wait iterations before notifying the
    /// hypervisor about a long spin loop.
    pub long_spin_wait_count: u32,
    #[bits(7)]
    pub implemented_physical_address_bits: u32,
    #[bits(25)]
    _reserved1: u32,
    _reserved2: u32,
}
212
/// The hardware features CPUID leaf output
/// (HV_CPUID_FUNCTION_MS_HV_HARDWARE_FEATURES): which hardware-assist
/// features are in use or supported. Field order defines the bit layout.
#[bitfield(u128)]
pub struct HvHardwareFeatures {
    pub apic_overlay_assist_in_use: bool,
    pub msr_bitmaps_in_use: bool,
    pub architectural_performance_counters_in_use: bool,
    pub second_level_address_translation_in_use: bool,
    pub dma_remapping_in_use: bool,
    pub interrupt_remapping_in_use: bool,
    pub memory_patrol_scrubber_present: bool,
    pub dma_protection_in_use: bool,
    pub hpet_requested: bool,
    pub synthetic_timers_volatile: bool,
    #[bits(4)]
    pub hypervisor_level: u32,
    pub physical_destination_mode_required: bool,
    pub use_vmfunc_for_alias_map_switch: bool,
    pub hv_register_for_memory_zeroing_supported: bool,
    pub unrestricted_guest_supported: bool,
    pub rdt_afeatures_supported: bool,
    pub rdt_mfeatures_supported: bool,
    pub child_perfmon_pmu_supported: bool,
    pub child_perfmon_lbr_supported: bool,
    pub child_perfmon_ipt_supported: bool,
    pub apic_emulation_supported: bool,
    pub child_x2_apic_recommended: bool,
    pub hardware_watchdog_reserved: bool,
    pub device_access_tracking_supported: bool,
    pub hardware_gpa_access_tracking_supported: bool,
    #[bits(4)]
    _reserved: u32,

    pub device_domain_input_width: u8,
    #[bits(24)]
    _reserved1: u32,
    _reserved2: u32,
    _reserved3: u32,
}
250
/// The isolation configuration CPUID leaf output
/// (HV_CPUID_FUNCTION_MS_HV_ISOLATION_CONFIGURATION). Field order defines
/// the bit layout.
#[bitfield(u128)]
pub struct HvIsolationConfiguration {
    /// Whether a paravisor is running below the guest.
    pub paravisor_present: bool,
    #[bits(31)]
    pub _reserved0: u32,

    /// The isolation architecture; values match [`HvPartitionIsolationType`].
    #[bits(4)]
    pub isolation_type: u8,
    _reserved11: bool,
    pub shared_gpa_boundary_active: bool,
    /// Position of the shared-GPA boundary bit ("vTOM"), when active.
    #[bits(6)]
    pub shared_gpa_boundary_bits: u8,
    #[bits(20)]
    _reserved12: u32,
    _reserved2: u32,
    _reserved3: u32,
}
268
open_enum! {
    /// Hypercall call codes, as passed in the hypercall input control word.
    /// 0x8000-range codes are extended hypercalls; 0xC000-range codes are
    /// VBS-VM-specific (see the `vbs` module).
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HypercallCode: u16 {
        #![expect(non_upper_case_globals)]

        HvCallSwitchVirtualAddressSpace = 0x0001,
        HvCallFlushVirtualAddressSpace = 0x0002,
        HvCallFlushVirtualAddressList = 0x0003,
        HvCallNotifyLongSpinWait = 0x0008,
        HvCallInvokeHypervisorDebugger = 0x000a,
        HvCallSendSyntheticClusterIpi = 0x000b,
        HvCallModifyVtlProtectionMask = 0x000c,
        HvCallEnablePartitionVtl = 0x000d,
        HvCallEnableVpVtl = 0x000f,
        HvCallVtlCall = 0x0011,
        HvCallVtlReturn = 0x0012,
        HvCallFlushVirtualAddressSpaceEx = 0x0013,
        HvCallFlushVirtualAddressListEx = 0x0014,
        HvCallSendSyntheticClusterIpiEx = 0x0015,
        HvCallInstallIntercept = 0x004d,
        HvCallGetVpRegisters = 0x0050,
        HvCallSetVpRegisters = 0x0051,
        HvCallTranslateVirtualAddress = 0x0052,
        HvCallPostMessage = 0x005C,
        HvCallSignalEvent = 0x005D,
        HvCallOutputDebugCharacter = 0x0071,
        HvCallGetSystemProperty = 0x007b,
        HvCallRetargetDeviceInterrupt = 0x007e,
        HvCallNotifyPartitionEvent = 0x0087,
        HvCallAssertVirtualInterrupt = 0x0094,
        HvCallStartVirtualProcessor = 0x0099,
        HvCallGetVpIndexFromApicId = 0x009A,
        HvCallTranslateVirtualAddressEx = 0x00AC,
        HvCallCheckForIoIntercept = 0x00ad,
        HvCallFlushGuestPhysicalAddressSpace = 0x00AF,
        HvCallFlushGuestPhysicalAddressList = 0x00B0,
        HvCallSignalEventDirect = 0x00C0,
        HvCallPostMessageDirect = 0x00C1,
        HvCallCheckSparseGpaPageVtlAccess = 0x00D4,
        HvCallAcceptGpaPages = 0x00D9,
        HvCallModifySparseGpaPageHostVisibility = 0x00DB,
        HvCallMemoryMappedIoRead = 0x0106,
        HvCallMemoryMappedIoWrite = 0x0107,
        HvCallPinGpaPageRanges = 0x0112,
        HvCallUnpinGpaPageRanges = 0x0113,
        HvCallQuerySparseGpaPageHostVisibility = 0x011C,

        // Extended hypercalls.
        HvExtCallQueryCapabilities = 0x8001,

        // VBS-VM hypercalls.
        HvCallVbsVmCallReport = 0xC001,
    }
}
323
// Synthetic MSR numbers exposed by the hypervisor to x64 guests.
pub const HV_X64_MSR_GUEST_OS_ID: u32 = 0x40000000;
pub const HV_X64_MSR_HYPERCALL: u32 = 0x40000001;
pub const HV_X64_MSR_VP_INDEX: u32 = 0x40000002;
pub const HV_X64_MSR_TIME_REF_COUNT: u32 = 0x40000020;
pub const HV_X64_MSR_REFERENCE_TSC: u32 = 0x40000021;
pub const HV_X64_MSR_TSC_FREQUENCY: u32 = 0x40000022;
pub const HV_X64_MSR_APIC_FREQUENCY: u32 = 0x40000023;
// Synthetic APIC access MSRs.
pub const HV_X64_MSR_EOI: u32 = 0x40000070;
pub const HV_X64_MSR_ICR: u32 = 0x40000071;
pub const HV_X64_MSR_TPR: u32 = 0x40000072;
pub const HV_X64_MSR_VP_ASSIST_PAGE: u32 = 0x40000073;
// Synthetic interrupt controller (SynIC) MSRs.
pub const HV_X64_MSR_SCONTROL: u32 = 0x40000080;
pub const HV_X64_MSR_SVERSION: u32 = 0x40000081;
pub const HV_X64_MSR_SIEFP: u32 = 0x40000082;
pub const HV_X64_MSR_SIMP: u32 = 0x40000083;
pub const HV_X64_MSR_EOM: u32 = 0x40000084;
// Synthetic interrupt source (SINT) MSRs, one per SINT (see NUM_SINTS).
pub const HV_X64_MSR_SINT0: u32 = 0x40000090;
pub const HV_X64_MSR_SINT1: u32 = 0x40000091;
pub const HV_X64_MSR_SINT2: u32 = 0x40000092;
pub const HV_X64_MSR_SINT3: u32 = 0x40000093;
pub const HV_X64_MSR_SINT4: u32 = 0x40000094;
pub const HV_X64_MSR_SINT5: u32 = 0x40000095;
pub const HV_X64_MSR_SINT6: u32 = 0x40000096;
pub const HV_X64_MSR_SINT7: u32 = 0x40000097;
pub const HV_X64_MSR_SINT8: u32 = 0x40000098;
pub const HV_X64_MSR_SINT9: u32 = 0x40000099;
pub const HV_X64_MSR_SINT10: u32 = 0x4000009a;
pub const HV_X64_MSR_SINT11: u32 = 0x4000009b;
pub const HV_X64_MSR_SINT12: u32 = 0x4000009c;
pub const HV_X64_MSR_SINT13: u32 = 0x4000009d;
pub const HV_X64_MSR_SINT14: u32 = 0x4000009e;
pub const HV_X64_MSR_SINT15: u32 = 0x4000009f;
// Synthetic timer MSRs, a config/count pair per timer (see NUM_TIMERS).
pub const HV_X64_MSR_STIMER0_CONFIG: u32 = 0x400000b0;
pub const HV_X64_MSR_STIMER0_COUNT: u32 = 0x400000b1;
pub const HV_X64_MSR_STIMER1_CONFIG: u32 = 0x400000b2;
pub const HV_X64_MSR_STIMER1_COUNT: u32 = 0x400000b3;
pub const HV_X64_MSR_STIMER2_CONFIG: u32 = 0x400000b4;
pub const HV_X64_MSR_STIMER2_COUNT: u32 = 0x400000b5;
pub const HV_X64_MSR_STIMER3_CONFIG: u32 = 0x400000b6;
pub const HV_X64_MSR_STIMER3_COUNT: u32 = 0x400000b7;
pub const HV_X64_MSR_GUEST_IDLE: u32 = 0x400000F0;
// Guest crash reporting MSRs: five parameter registers plus a control MSR.
pub const HV_X64_MSR_GUEST_CRASH_P0: u32 = 0x40000100;
pub const HV_X64_MSR_GUEST_CRASH_P1: u32 = 0x40000101;
pub const HV_X64_MSR_GUEST_CRASH_P2: u32 = 0x40000102;
pub const HV_X64_MSR_GUEST_CRASH_P3: u32 = 0x40000103;
pub const HV_X64_MSR_GUEST_CRASH_P4: u32 = 0x40000104;
pub const HV_X64_MSR_GUEST_CRASH_CTL: u32 = 0x40000105;

/// Number of guest crash parameter MSRs (P0..P4).
pub const HV_X64_GUEST_CRASH_PARAMETER_MSRS: usize = 5;
373
/// A hypervisor status code (`HV_STATUS`): zero means success, any nonzero
/// value is an error. Use [`HvStatus::result`] to convert to a `Result`.
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, PartialEq, Eq)]
#[repr(transparent)]
pub struct HvStatus(pub u16);
380
381impl HvStatus {
382 pub const SUCCESS: Self = Self(0);
384
385 pub fn result(self) -> HvResult<()> {
388 if let Ok(err) = self.0.try_into() {
389 Err(HvError(err))
390 } else {
391 Ok(())
392 }
393 }
394
395 pub fn is_ok(self) -> bool {
397 self == Self::SUCCESS
398 }
399
400 pub fn is_err(self) -> bool {
402 self != Self::SUCCESS
403 }
404
405 const fn from_bits(bits: u16) -> Self {
406 Self(bits)
407 }
408
409 const fn into_bits(self) -> u16 {
410 self.0
411 }
412}
413
414impl From<Result<(), HvError>> for HvStatus {
415 fn from(err: Result<(), HvError>) -> Self {
416 err.err().map_or(Self::SUCCESS, |err| Self(err.0.get()))
417 }
418}
419
420impl Debug for HvStatus {
421 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
422 match self.result() {
423 Ok(()) => f.write_str("Success"),
424 Err(err) => Debug::fmt(&err, f),
425 }
426 }
427}
428
/// A nonzero hypervisor status code. The `NonZeroU16` niche makes
/// `Option<HvError>` and `Result<(), HvError>` the same size as `u16`.
#[derive(Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout)]
#[repr(transparent)]
pub struct HvError(core::num::NonZeroU16);
436
impl From<core::num::NonZeroU16> for HvError {
    /// Wraps a raw nonzero status code, without validating that it is a
    /// known error value.
    fn from(err: core::num::NonZeroU16) -> Self {
        Self(err)
    }
}
442
443impl Debug for HvError {
444 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
445 match self.debug_name() {
446 Some(name) => f.pad(name),
447 None => Debug::fmt(&self.0.get(), f),
448 }
449 }
450}
451
452impl core::fmt::Display for HvError {
453 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
454 match self.doc_str() {
455 Some(s) => f.write_str(s),
456 None => write!(f, "Hypervisor error {:#06x}", self.0),
457 }
458 }
459}
460
461impl core::error::Error for HvError {}
462
// Defines the known error constants on `$ty` (each entry requires exactly
// one `#[doc]`/`///` attribute), plus `debug_name` (symbolic name lookup)
// and `doc_str` (doc text lookup, trimmed at compile time) used by the
// `Debug` and `Display` impls.
macro_rules! hv_error {
    ($ty:ty, $(#[doc = $doc:expr] $ident:ident = $val:expr),* $(,)?) => {

        #[expect(non_upper_case_globals)]
        impl $ty {
            $(
                #[doc = $doc]
                pub const $ident: Self = Self(core::num::NonZeroU16::new($val).unwrap());
            )*

            // Maps a raw code back to its constant's name, if known.
            fn debug_name(&self) -> Option<&'static str> {
                Some(match self.0.get() {
                    $(
                        $val => stringify!($ident),
                    )*
                    _ => return None,
                })
            }

            // Maps a raw code to its (trimmed) doc string, if known.
            fn doc_str(&self) -> Option<&'static str> {
                Some(match self.0.get() {
                    $(
                        $val => const { $doc.trim_ascii() },
                    )*
                    _ => return None,
                })
            }
        }
    };
}
493
494hv_error! {
496 HvError,
497 InvalidHypercallCode = 0x0002,
499 InvalidHypercallInput = 0x0003,
501 InvalidAlignment = 0x0004,
503 InvalidParameter = 0x0005,
505 AccessDenied = 0x0006,
507 InvalidPartitionState = 0x0007,
509 OperationDenied = 0x0008,
511 UnknownProperty = 0x0009,
513 PropertyValueOutOfRange = 0x000A,
515 InsufficientMemory = 0x000B,
517 PartitionTooDeep = 0x000C,
519 InvalidPartitionId = 0x000D,
521 InvalidVpIndex = 0x000E,
523 NotFound = 0x0010,
525 InvalidPortId = 0x0011,
527 InvalidConnectionId = 0x0012,
529 InsufficientBuffers = 0x0013,
531 NotAcknowledged = 0x0014,
533 InvalidVpState = 0x0015,
535 Acknowledged = 0x0016,
537 InvalidSaveRestoreState = 0x0017,
539 InvalidSynicState = 0x0018,
541 ObjectInUse = 0x0019,
543 InvalidProximityDomainInfo = 0x001A,
545 NoData = 0x001B,
547 Inactive = 0x001C,
549 NoResources = 0x001D,
551 FeatureUnavailable = 0x001E,
553 PartialPacket = 0x001F,
555 ProcessorFeatureNotSupported = 0x0020,
557 ProcessorCacheLineFlushSizeIncompatible = 0x0030,
559 InsufficientBuffer = 0x0033,
561 IncompatibleProcessor = 0x0037,
563 InsufficientDeviceDomains = 0x0038,
565 CpuidFeatureValidationError = 0x003C,
567 CpuidXsaveFeatureValidationError = 0x003D,
569 ProcessorStartupTimeout = 0x003E,
571 SmxEnabled = 0x003F,
573 InvalidLpIndex = 0x0041,
575 InvalidRegisterValue = 0x0050,
577 InvalidVtlState = 0x0051,
579 NxNotDetected = 0x0055,
581 InvalidDeviceId = 0x0057,
583 InvalidDeviceState = 0x0058,
585 PendingPageRequests = 0x0059,
587 PageRequestInvalid = 0x0060,
589 KeyAlreadyExists = 0x0065,
591 DeviceAlreadyInDomain = 0x0066,
593 InvalidCpuGroupId = 0x006F,
595 InvalidCpuGroupState = 0x0070,
597 OperationFailed = 0x0071,
599 NotAllowedWithNestedVirtActive = 0x0072,
601 InsufficientRootMemory = 0x0073,
603 EventBufferAlreadyFreed = 0x0074,
605 Timeout = 0x0078,
607 VtlAlreadyEnabled = 0x0086,
609 UnknownRegisterName = 0x0087,
611}
612
/// Shorthand for a result whose error type is a hypervisor status code.
pub type HvResult<T> = Result<T, HvError>;
615
/// A Virtual Trust Level. Higher VTLs are more privileged than lower ones
/// (hence the `Ord` derive).
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Vtl {
    Vtl0 = 0,
    Vtl1 = 1,
    Vtl2 = 2,
}
623
624impl TryFrom<u8> for Vtl {
625 type Error = HvError;
626
627 fn try_from(value: u8) -> Result<Self, Self::Error> {
628 Ok(match value {
629 0 => Self::Vtl0,
630 1 => Self::Vtl1,
631 2 => Self::Vtl2,
632 _ => return Err(HvError::InvalidParameter),
633 })
634 }
635}
636
637impl From<Vtl> for u8 {
638 fn from(value: Vtl) -> Self {
639 value as u8
640 }
641}
642
/// Contents of the guest crash control MSR (HV_X64_MSR_GUEST_CRASH_CTL).
/// The meaningful bits live in the top byte; field order defines the layout.
#[bitfield(u64)]
pub struct GuestCrashCtl {
    #[bits(58)]
    _reserved: u64,
    #[bits(3)]
    pub pre_os_id: u8,
    /// Set when no crash dump will be captured.
    #[bits(1)]
    pub no_crash_dump: bool,
    /// Set when the crash parameter MSRs carry a message address/length.
    #[bits(1)]
    pub crash_message: bool,
    /// Written by the guest to notify the host of the crash.
    #[bits(1)]
    pub crash_notify: bool,
}
662
/// A 128-bit value stored as native-endian bytes with 16-byte alignment.
/// Unlike `u128`, this type supports the zerocopy traits, making it usable
/// in wire-format structs.
#[repr(C, align(16))]
#[derive(Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AlignedU128([u8; 16]);

impl AlignedU128 {
    /// Returns the native-endian byte representation.
    pub fn as_ne_bytes(&self) -> [u8; 16] {
        self.0
    }

    /// Constructs a value from native-endian bytes.
    pub fn from_ne_bytes(val: [u8; 16]) -> Self {
        Self(val)
    }
}

impl Debug for AlignedU128 {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Format as the numeric value rather than a byte array.
        Debug::fmt(&u128::from_ne_bytes(self.0), f)
    }
}
682
683impl From<u128> for AlignedU128 {
684 fn from(v: u128) -> Self {
685 Self(v.to_ne_bytes())
686 }
687}
688
689impl From<u64> for AlignedU128 {
690 fn from(v: u64) -> Self {
691 (v as u128).into()
692 }
693}
694
695impl From<u32> for AlignedU128 {
696 fn from(v: u32) -> Self {
697 (v as u128).into()
698 }
699}
700
701impl From<u16> for AlignedU128 {
702 fn from(v: u16) -> Self {
703 (v as u128).into()
704 }
705}
706
707impl From<u8> for AlignedU128 {
708 fn from(v: u8) -> Self {
709 (v as u128).into()
710 }
711}
712
713impl From<AlignedU128> for u128 {
714 fn from(v: AlignedU128) -> Self {
715 u128::from_ne_bytes(v.0)
716 }
717}
718
open_enum! {
    /// The type field of a synthetic message ([`HvMessageHeader::typ`]).
    /// Values with the top bit set are hypervisor-generated intercept
    /// messages; 0x8001xxxx values are architecture-specific.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvMessageType: u32 {
        #![expect(non_upper_case_globals)]

        HvMessageTypeNone = 0x00000000,

        HvMessageTypeUnmappedGpa = 0x80000000,
        HvMessageTypeGpaIntercept = 0x80000001,
        HvMessageTypeUnacceptedGpa = 0x80000003,
        HvMessageTypeGpaAttributeIntercept = 0x80000004,
        HvMessageTypeEnablePartitionVtlIntercept = 0x80000005,
        HvMessageTypeTimerExpired = 0x80000010,
        HvMessageTypeInvalidVpRegisterValue = 0x80000020,
        HvMessageTypeUnrecoverableException = 0x80000021,
        HvMessageTypeUnsupportedFeature = 0x80000022,
        HvMessageTypeTlbPageSizeMismatch = 0x80000023,
        HvMessageTypeIommuFault = 0x80000024,
        HvMessageTypeEventLogBufferComplete = 0x80000040,
        HvMessageTypeHypercallIntercept = 0x80000050,
        HvMessageTypeSynicEventIntercept = 0x80000060,
        HvMessageTypeSynicSintIntercept = 0x80000061,
        HvMessageTypeSynicSintDeliverable = 0x80000062,
        HvMessageTypeAsyncCallCompletion = 0x80000070,
        HvMessageTypeX64IoPortIntercept = 0x80010000,
        HvMessageTypeMsrIntercept = 0x80010001,
        HvMessageTypeX64CpuidIntercept = 0x80010002,
        HvMessageTypeExceptionIntercept = 0x80010003,
        HvMessageTypeX64ApicEoi = 0x80010004,
        HvMessageTypeX64IommuPrq = 0x80010005,
        HvMessageTypeRegisterIntercept = 0x80010006,
        HvMessageTypeX64Halt = 0x80010007,
        HvMessageTypeX64InterruptionDeliverable = 0x80010008,
        HvMessageTypeX64SipiIntercept = 0x80010009,
        HvMessageTypeX64RdtscIntercept = 0x8001000a,
        HvMessageTypeX64ApicSmiIntercept = 0x8001000b,
        HvMessageTypeArm64ResetIntercept = 0x8001000c,
        HvMessageTypeX64ApicInitSipiIntercept = 0x8001000d,
        HvMessageTypeX64ApicWriteIntercept = 0x8001000e,
        HvMessageTypeX64ProxyInterruptIntercept = 0x8001000f,
        HvMessageTypeX64IsolationCtrlRegIntercept = 0x80010010,
        HvMessageTypeX64SnpGuestRequestIntercept = 0x80010011,
        HvMessageTypeX64ExceptionTrapIntercept = 0x80010012,
        HvMessageTypeX64SevVmgexitIntercept = 0x80010013,
    }
}
765
766impl Default for HvMessageType {
767 fn default() -> Self {
768 HvMessageType::HvMessageTypeNone
769 }
770}
771
/// The SINT reserved for hypervisor intercept messages.
pub const HV_SYNIC_INTERCEPTION_SINT_INDEX: u8 = 0;

/// Number of synthetic interrupt sources per virtual processor.
pub const NUM_SINTS: usize = 16;
/// Number of synthetic timers per virtual processor.
pub const NUM_TIMERS: usize = 4;
776
/// The fixed header of a synthetic message.
#[repr(C)]
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessageHeader {
    /// The message type; [`HvMessageType::HvMessageTypeNone`] marks an
    /// empty slot.
    pub typ: HvMessageType,
    /// Length in bytes of the valid portion of the payload.
    pub len: u8,
    pub flags: HvMessageFlags,
    pub rsvd: u16,
    pub id: u64,
}

/// Flags in a synthetic message header.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessageFlags {
    /// Set by the hypervisor when another message is queued behind this one.
    pub message_pending: bool,
    #[bits(7)]
    _reserved: u8,
}
794
/// Total size in bytes of a synthetic message (header + payload).
pub const HV_MESSAGE_SIZE: usize = size_of::<HvMessage>();
const_assert!(HV_MESSAGE_SIZE == 256);
/// Size in bytes of a synthetic message payload (256 minus the 16-byte header).
pub const HV_MESSAGE_PAYLOAD_SIZE: usize = 240;
798
/// A synthetic message, as deposited in a SIM page message slot.
/// 16-byte aligned so that payload types with up to 16-byte alignment can
/// be reinterpreted in place (see [`MessagePayload`]).
#[repr(C, align(16))]
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMessage {
    pub header: HvMessageHeader,
    pub payload_buffer: [u8; HV_MESSAGE_PAYLOAD_SIZE],
}
805
806impl Default for HvMessage {
807 fn default() -> Self {
808 Self {
809 header: FromZeros::new_zeroed(),
810 payload_buffer: [0; 240],
811 }
812 }
813}
814
815impl HvMessage {
816 pub fn new(typ: HvMessageType, id: u64, payload: &[u8]) -> Self {
819 let mut msg = HvMessage {
820 header: HvMessageHeader {
821 typ,
822 len: payload.len() as u8,
823 flags: HvMessageFlags::new(),
824 rsvd: 0,
825 id,
826 },
827 payload_buffer: [0; 240],
828 };
829 msg.payload_buffer[..payload.len()].copy_from_slice(payload);
830 msg
831 }
832
833 pub fn payload(&self) -> &[u8] {
834 &self.payload_buffer[..self.header.len as usize]
835 }
836
837 pub fn as_message<T: MessagePayload>(&self) -> &T {
838 let () = T::CHECK;
840 T::ref_from_prefix(&self.payload_buffer).unwrap().0
841 }
842
843 pub fn as_message_mut<T: MessagePayload>(&mut self) -> &T {
844 let () = T::CHECK;
846 T::mut_from_prefix(&mut self.payload_buffer).unwrap().0
847 }
848}
849
/// Marker trait for types that can be read in place from an
/// [`HvMessage`] payload buffer.
pub trait MessagePayload: KnownLayout + Immutable + IntoBytes + FromBytes + Sized {
    /// Compile-time assertion that `Self` fits in the payload buffer and
    /// does not require stricter alignment than `HvMessage` provides.
    /// Evaluated via `let () = T::CHECK;` at each use site.
    #[doc(hidden)]
    const CHECK: () = {
        assert!(size_of::<Self>() <= HV_MESSAGE_PAYLOAD_SIZE);
        assert!(align_of::<Self>() <= align_of::<HvMessage>());
    };
}
859
/// Payload of a `HvMessageTypeTimerExpired` message.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TimerMessagePayload {
    /// Which synthetic timer expired (0..NUM_TIMERS).
    pub timer_index: u32,
    pub reserved: u32,
    /// The configured expiration time, in reference-time units.
    // NOTE(review): units presumed to be 100ns reference time — confirm
    // against the hypervisor specification.
    pub expiration_time: u64,
    /// The time at which the message was delivered.
    pub delivery_time: u64,
}
868
/// Definitions for the hypercall ABI: the input control word, the output
/// status word, and input/output structures for individual hypercalls.
pub mod hypercall {
    use super::*;
    use core::ops::RangeInclusive;
    use zerocopy::Unalign;
873
    /// The hypercall input control word, passed in a register on entry.
    /// Field order defines the bit layout.
    #[bitfield(u64)]
    pub struct Control {
        /// The hypercall code (see `HypercallCode`).
        pub code: u16,
        /// Whether the parameters are passed in registers (fast) rather
        /// than memory.
        pub fast: bool,
        /// Size of the variable header, in 8-byte units.
        #[bits(10)]
        pub variable_header_size: usize,
        #[bits(4)]
        _rsvd0: u8,
        /// Set when the hypercall should be handled by the L0 hypervisor.
        pub nested: bool,
        /// Number of elements for a rep hypercall.
        #[bits(12)]
        pub rep_count: usize,
        #[bits(4)]
        _rsvd1: u8,
        /// Index of the first element to process when resuming a rep call.
        #[bits(12)]
        pub rep_start: usize,
        #[bits(4)]
        _rsvd2: u8,
    }
899
    /// The hypercall output word, returned in a register on exit.
    /// Field order defines the bit layout.
    #[bitfield(u64)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    #[must_use]
    pub struct HypercallOutput {
        /// The completion status of the call.
        #[bits(16)]
        pub call_status: HvStatus,
        pub rsvd: u16,
        /// Number of rep elements successfully processed.
        #[bits(12)]
        pub elements_processed: usize,
        #[bits(20)]
        pub rsvd2: u32,
    }
913
    impl From<HvError> for HypercallOutput {
        /// Builds a failure output carrying `e` as the call status (with
        /// zero elements processed).
        fn from(e: HvError) -> Self {
            Self::new().with_call_status(Err(e).into())
        }
    }

    impl HypercallOutput {
        /// A success output with zero elements processed.
        pub const SUCCESS: Self = Self::new();

        /// Converts the call status into a `Result`.
        pub fn result(&self) -> Result<(), HvError> {
            self.call_status().result()
        }
    }
928
    /// A register name/value pair, as used by the get/set VP registers
    /// hypercalls.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvRegisterAssoc {
        pub name: HvRegisterName,
        pub pad: [u32; 3],
        pub value: HvRegisterValue,
    }
936
    impl<N: Into<HvRegisterName>, T: Into<HvRegisterValue>> From<(N, T)> for HvRegisterAssoc {
        /// Builds an association from a `(name, value)` pair, zeroing the
        /// padding.
        fn from((name, value): (N, T)) -> Self {
            Self {
                name: name.into(),
                pad: [0; 3],
                value: value.into(),
            }
        }
    }
946
947 impl<N: Copy + Into<HvRegisterName>, T: Copy + Into<HvRegisterValue>> From<&(N, T)>
948 for HvRegisterAssoc
949 {
950 fn from(&(name, value): &(N, T)) -> Self {
951 Self {
952 name: name.into(),
953 pad: [0; 3],
954 value: value.into(),
955 }
956 }
957 }
958
    /// Contents of the hypercall MSR (HV_X64_MSR_HYPERCALL), which maps the
    /// hypercall code page. Field order defines the bit layout.
    #[bitfield(u64)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct MsrHypercallContents {
        /// Whether the hypercall page is mapped.
        pub enable: bool,
        /// When set, the MSR may not be modified again until reset.
        pub locked: bool,
        #[bits(10)]
        pub reserved_p: u64,
        /// Guest page number of the hypercall page.
        #[bits(52)]
        pub gpn: u64,
    }
969
970 #[repr(C, align(8))]
971 #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
972 pub struct PostMessage {
973 pub connection_id: u32,
974 pub padding: u32,
975 pub message_type: u32,
976 pub payload_size: u32,
977 pub payload: [u8; 240],
978 }
979
980 #[repr(C, align(8))]
981 #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
982 pub struct SignalEvent {
983 pub connection_id: u32,
984 pub flag_number: u16,
985 pub rsvd: u16,
986 }
987
    /// Input for the `HvCallPostMessageDirect` hypercall, which deposits a
    /// full message into a specific VP/VTL/SINT.
    #[repr(C)]
    #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct PostMessageDirect {
        pub partition_id: u64,
        pub vp_index: u32,
        pub vtl: u8,
        pub padding0: [u8; 3],
        pub sint: u8,
        pub padding1: [u8; 3],
        // Unalign: HvMessage is 16-byte aligned but sits at an 8-byte
        // aligned offset here.
        pub message: Unalign<HvMessage>,
        pub padding2: u32,
    }
1000
    /// Input for the `HvCallSignalEventDirect` hypercall.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct SignalEventDirect {
        pub target_partition: u64,
        pub target_vp: u32,
        pub target_vtl: u8,
        pub target_sint: u8,
        pub flag_number: u16,
    }

    /// Output of the `HvCallSignalEventDirect` hypercall.
    #[repr(C)]
    #[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct SignalEventDirectOutput {
        /// Nonzero when the event flag transitioned from clear to set.
        pub newly_signaled: u8,
        pub rsvd: [u8; 7],
    }
1017
    /// A device interrupt description (source plus source-specific data).
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct InterruptEntry {
        pub source: HvInterruptSource,
        pub rsvd: u32,
        /// Source-specific data; interpretation depends on `source`.
        pub data: [u32; 2],
    }

    open_enum! {
        /// The origin of a device interrupt.
        #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
        pub enum HvInterruptSource: u32 {
            MSI = 1,
            IO_APIC = 2,
        }
    }
1033
    /// The destination of a device interrupt.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct InterruptTarget {
        pub vector: u32,
        pub flags: HvInterruptTargetFlags,
        /// Either a processor mask or a generic-set format selector,
        /// depending on `flags.processor_set`.
        pub mask_or_format: u64,
    }

    /// Flags qualifying an [`InterruptTarget`].
    #[bitfield(u32)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvInterruptTargetFlags {
        pub multicast: bool,
        /// When set, `mask_or_format` selects a generic-set format.
        pub processor_set: bool,
        #[bits(30)]
        pub reserved: u32,
    }

    // Raw bit values matching the flags above.
    pub const HV_DEVICE_INTERRUPT_TARGET_MULTICAST: u32 = 1;
    pub const HV_DEVICE_INTERRUPT_TARGET_PROCESSOR_SET: u32 = 2;

    // Generic-set formats for sparse processor sets.
    pub const HV_GENERIC_SET_SPARSE_4K: u64 = 0;
    pub const HV_GENERIC_SET_ALL: u64 = 1;
1056
    /// Input for the `HvCallRetargetDeviceInterrupt` hypercall.
    /// A variable-length processor list may follow `target_header`.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct RetargetDeviceInterrupt {
        pub partition_id: u64,
        pub device_id: u64,
        pub entry: InterruptEntry,
        pub rsvd: u64,
        pub target_header: InterruptTarget,
    }
1066
    /// A VTL selector used in hypercall inputs: either an explicit target
    /// VTL or "use the caller's current VTL".
    #[bitfield(u8)]
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct HvInputVtl {
        /// The target VTL number; only meaningful when `use_target_vtl`.
        #[bits(4)]
        pub target_vtl_value: u8,
        /// When clear, the current VTL is used and `target_vtl_value` is
        /// ignored.
        pub use_target_vtl: bool,
        #[bits(3)]
        pub reserved: u8,
    }
1076
1077 impl From<Vtl> for HvInputVtl {
1078 fn from(value: Vtl) -> Self {
1079 Self::from(Some(value))
1080 }
1081 }
1082
1083 impl From<Option<Vtl>> for HvInputVtl {
1084 fn from(value: Option<Vtl>) -> Self {
1085 Self::new()
1086 .with_use_target_vtl(value.is_some())
1087 .with_target_vtl_value(value.map_or(0, Into::into))
1088 }
1089 }
1090
1091 impl HvInputVtl {
1092 pub fn target_vtl(&self) -> Result<Option<Vtl>, HvError> {
1094 if self.reserved() != 0 {
1095 return Err(HvError::InvalidParameter);
1096 }
1097 if self.use_target_vtl() {
1098 Ok(Some(self.target_vtl_value().try_into()?))
1099 } else {
1100 Ok(None)
1101 }
1102 }
1103
1104 pub const CURRENT_VTL: Self = Self::new();
1105 }
1106
    /// Fixed input header for the `HvCallGetVpRegisters` and
    /// `HvCallSetVpRegisters` hypercalls; the register names/values follow
    /// as rep elements.
    #[repr(C)]
    #[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub struct GetSetVpRegisters {
        pub partition_id: u64,
        pub vp_index: u32,
        pub target_vtl: HvInputVtl,
        pub rsvd: [u8; 3],
    }
1115
    open_enum::open_enum! {
        /// Microsoft operating system identifiers used in the guest OS ID
        /// MSR encoding.
        #[derive(Default)]
        pub enum HvGuestOsMicrosoftIds: u8 {
            UNDEFINED = 0x00,
            MSDOS = 0x01,
            WINDOWS_3X = 0x02,
            WINDOWS_9X = 0x03,
            WINDOWS_NT = 0x04,
            WINDOWS_CE = 0x05,
        }
    }

    /// The guest OS ID MSR encoding for Microsoft operating systems (the
    /// open-source bit is clear). Field order defines the bit layout.
    #[bitfield(u64)]
    pub struct HvGuestOsMicrosoft {
        #[bits(40)]
        _rsvd: u64,
        /// The OS identifier (see [`HvGuestOsMicrosoftIds`]).
        #[bits(8)]
        pub os_id: u8,
        #[bits(16)]
        pub vendor_id: u16,
    }
1138
    open_enum::open_enum! {
        /// Open-source operating system identifiers used in the guest OS ID
        /// MSR encoding.
        #[derive(Default)]
        pub enum HvGuestOsOpenSourceType: u8 {
            UNDEFINED = 0x00,
            LINUX = 0x01,
            FREEBSD = 0x02,
            XEN = 0x03,
            ILLUMOS = 0x04,
        }
    }

    /// The guest OS ID MSR encoding for open-source operating systems (the
    /// open-source bit is set). Field order defines the bit layout.
    #[bitfield(u64)]
    pub struct HvGuestOsOpenSource {
        #[bits(16)]
        pub build_no: u16,
        #[bits(32)]
        pub version: u32,
        /// The OS identifier (see [`HvGuestOsOpenSourceType`]).
        #[bits(8)]
        pub os_id: u8,
        #[bits(7)]
        pub os_type: u8,
        /// Always set for this encoding.
        #[bits(1)]
        pub is_open_source: bool,
    }
1163
    /// The raw guest OS ID MSR value. Only the discriminator bit is
    /// modeled here; decode the rest via [`HvGuestOsId::microsoft`] or
    /// [`HvGuestOsId::open_source`].
    #[bitfield(u64)]
    pub struct HvGuestOsId {
        #[bits(63)]
        _rsvd: u64,
        is_open_source: bool,
    }
1170
1171 impl HvGuestOsId {
1172 pub fn microsoft(&self) -> Option<HvGuestOsMicrosoft> {
1173 (!self.is_open_source()).then(|| HvGuestOsMicrosoft::from(u64::from(*self)))
1174 }
1175
1176 pub fn open_source(&self) -> Option<HvGuestOsOpenSource> {
1177 (self.is_open_source()).then(|| HvGuestOsOpenSource::from(u64::from(*self)))
1178 }
1179
1180 pub fn as_u64(&self) -> u64 {
1181 self.0
1182 }
1183 }
1184
    // Access masks for HvCallInstallIntercept, combinable as bit flags.
    pub const HV_INTERCEPT_ACCESS_MASK_NONE: u32 = 0x00;
    pub const HV_INTERCEPT_ACCESS_MASK_READ: u32 = 0x01;
    pub const HV_INTERCEPT_ACCESS_MASK_WRITE: u32 = 0x02;
    pub const HV_INTERCEPT_ACCESS_MASK_READ_WRITE: u32 =
        HV_INTERCEPT_ACCESS_MASK_READ | HV_INTERCEPT_ACCESS_MASK_WRITE;
    pub const HV_INTERCEPT_ACCESS_MASK_EXECUTE: u32 = 0x04;
1191
    open_enum::open_enum! {
        /// The kind of event an installed intercept triggers on. Determines
        /// how [`HvInterceptParameters`] is interpreted.
        #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
        pub enum HvInterceptType: u32 {
            #![expect(non_upper_case_globals)]
            HvInterceptTypeX64IoPort = 0x00000000,
            HvInterceptTypeX64Msr = 0x00000001,
            HvInterceptTypeX64Cpuid = 0x00000002,
            HvInterceptTypeException = 0x00000003,
            HvInterceptTypeHypercall = 0x00000008,
            HvInterceptTypeUnknownSynicConnection = 0x0000000D,
            HvInterceptTypeX64ApicEoi = 0x0000000E,
            HvInterceptTypeRetargetInterruptWithUnknownDeviceId = 0x0000000F,
            HvInterceptTypeX64IoPortRange = 0x00000011,
        }
    }
1207
/// Type-erased intercept parameter payload; the interpretation depends on the
/// accompanying [`HvInterceptType`] (see the constructor/accessor methods).
#[repr(transparent)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
pub struct HvInterceptParameters(u64);
1211
1212 impl HvInterceptParameters {
1213 pub fn new_io_port(port: u16) -> Self {
1214 Self(port as u64)
1215 }
1216
1217 pub fn new_io_port_range(ports: RangeInclusive<u16>) -> Self {
1218 let base = *ports.start() as u64;
1219 let end = *ports.end() as u64;
1220 Self(base | (end << 16))
1221 }
1222
1223 pub fn new_exception(vector: u16) -> Self {
1224 Self(vector as u64)
1225 }
1226
1227 pub fn io_port(&self) -> u16 {
1228 self.0 as u16
1229 }
1230
1231 pub fn io_port_range(&self) -> RangeInclusive<u16> {
1232 let base = self.0 as u16;
1233 let end = (self.0 >> 16) as u16;
1234 base..=end
1235 }
1236
1237 pub fn cpuid_index(&self) -> u32 {
1238 self.0 as u32
1239 }
1240
1241 pub fn exception(&self) -> u16 {
1242 self.0 as u16
1243 }
1244 }
1245
/// Input for the install-intercept hypercall.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
pub struct InstallIntercept {
    pub partition_id: u64,
    /// Combination of the `HV_INTERCEPT_ACCESS_MASK_*` bits.
    pub access_type_mask: u32,
    pub intercept_type: HvInterceptType,
    /// Payload interpreted according to `intercept_type`.
    pub intercept_parameters: HvInterceptParameters,
}

/// Input for the assert-virtual-interrupt hypercall.
#[repr(C)]
#[derive(Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes, Debug)]
pub struct AssertVirtualInterrupt {
    pub partition_id: u64,
    pub interrupt_control: HvInterruptControl,
    pub destination_address: u64,
    pub requested_vector: u32,
    pub target_vtl: u8,
    // Reserved padding.
    pub rsvd0: u8,
    pub rsvd1: u16,
}
1266
/// Input for the start-virtual-processor hypercall (x64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct StartVirtualProcessorX64 {
    pub partition_id: u64,
    pub vp_index: u32,
    pub target_vtl: u8,
    // Reserved padding.
    pub rsvd0: u8,
    pub rsvd1: u16,
    /// Initial architectural state for the started VP.
    pub vp_context: InitialVpContextX64,
}
1277
/// Initial x64 register state applied when starting a VP
/// ([`StartVirtualProcessorX64`]) or enabling a VTL ([`EnableVpVtlX64`]).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct InitialVpContextX64 {
    pub rip: u64,
    pub rsp: u64,
    pub rflags: u64,
    // Segment registers.
    pub cs: HvX64SegmentRegister,
    pub ds: HvX64SegmentRegister,
    pub es: HvX64SegmentRegister,
    pub fs: HvX64SegmentRegister,
    pub gs: HvX64SegmentRegister,
    pub ss: HvX64SegmentRegister,
    pub tr: HvX64SegmentRegister,
    pub ldtr: HvX64SegmentRegister,
    // Descriptor tables.
    pub idtr: HvX64TableRegister,
    pub gdtr: HvX64TableRegister,
    // Control registers and PAT MSR.
    pub efer: u64,
    pub cr0: u64,
    pub cr3: u64,
    pub cr4: u64,
    pub msr_cr_pat: u64,
}
1300
/// Input for the start-virtual-processor hypercall (ARM64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct StartVirtualProcessorArm64 {
    pub partition_id: u64,
    pub vp_index: u32,
    pub target_vtl: u8,
    // Reserved padding.
    pub rsvd0: u8,
    pub rsvd1: u16,
    /// Initial architectural state for the started VP.
    pub vp_context: InitialVpContextArm64,
}

/// Initial ARM64 register state applied when starting a VP or enabling a VTL.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct InitialVpContextArm64 {
    pub pc: u64,
    pub sp_elh: u64,
    pub sctlr_el1: u64,
    pub mair_el1: u64,
    pub tcr_el1: u64,
    pub vbar_el1: u64,
    pub ttbr0_el1: u64,
    pub ttbr1_el1: u64,
    pub x18: u64,
}
1325
1326 impl InitialVpContextX64 {
1327 pub fn as_hv_register_assocs(&self) -> impl Iterator<Item = HvRegisterAssoc> + '_ {
1328 let regs = [
1329 (HvX64RegisterName::Rip, HvRegisterValue::from(self.rip)).into(),
1330 (HvX64RegisterName::Rsp, HvRegisterValue::from(self.rsp)).into(),
1331 (
1332 HvX64RegisterName::Rflags,
1333 HvRegisterValue::from(self.rflags),
1334 )
1335 .into(),
1336 (HvX64RegisterName::Cs, HvRegisterValue::from(self.cs)).into(),
1337 (HvX64RegisterName::Ds, HvRegisterValue::from(self.ds)).into(),
1338 (HvX64RegisterName::Es, HvRegisterValue::from(self.es)).into(),
1339 (HvX64RegisterName::Fs, HvRegisterValue::from(self.fs)).into(),
1340 (HvX64RegisterName::Gs, HvRegisterValue::from(self.gs)).into(),
1341 (HvX64RegisterName::Ss, HvRegisterValue::from(self.ss)).into(),
1342 (HvX64RegisterName::Tr, HvRegisterValue::from(self.tr)).into(),
1343 (HvX64RegisterName::Ldtr, HvRegisterValue::from(self.ldtr)).into(),
1344 (HvX64RegisterName::Idtr, HvRegisterValue::from(self.idtr)).into(),
1345 (HvX64RegisterName::Gdtr, HvRegisterValue::from(self.gdtr)).into(),
1346 (HvX64RegisterName::Efer, HvRegisterValue::from(self.efer)).into(),
1347 (HvX64RegisterName::Cr0, HvRegisterValue::from(self.cr0)).into(),
1348 (HvX64RegisterName::Cr3, HvRegisterValue::from(self.cr3)).into(),
1349 (HvX64RegisterName::Cr4, HvRegisterValue::from(self.cr4)).into(),
1350 (
1351 HvX64RegisterName::Pat,
1352 HvRegisterValue::from(self.msr_cr_pat),
1353 )
1354 .into(),
1355 ];
1356 regs.into_iter()
1357 }
1358 }
1359
/// Control flags for the translate-virtual-address hypercall (x64).
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaControlFlagsX64 {
    /// Validate read access as part of the translation.
    pub validate_read: bool,
    /// Validate write access.
    pub validate_write: bool,
    /// Validate execute access.
    pub validate_execute: bool,
    pub privilege_exempt: bool,
    pub set_page_table_bits: bool,
    pub tlb_flush_inhibit: bool,
    pub supervisor_access: bool,
    pub user_access: bool,
    pub enforce_smap: bool,
    pub override_smap: bool,
    pub shadow_stack: bool,
    #[bits(45)]
    _unused: u64,
    // Top byte is a raw `HvInputVtl`; the impl below exposes typed accessors.
    input_vtl_value: u8,
}
1394
1395 impl TranslateGvaControlFlagsX64 {
1396 pub fn input_vtl(&self) -> HvInputVtl {
1397 self.input_vtl_value().into()
1398 }
1399
1400 pub fn with_input_vtl(self, input_vtl: HvInputVtl) -> Self {
1401 self.with_input_vtl_value(input_vtl.into())
1402 }
1403
1404 pub fn set_input_vtl(&mut self, input_vtl: HvInputVtl) {
1405 self.set_input_vtl_value(input_vtl.into())
1406 }
1407 }
1408
/// Control flags for the translate-virtual-address hypercall (ARM64).
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaControlFlagsArm64 {
    /// Validate read access as part of the translation.
    pub validate_read: bool,
    /// Validate write access.
    pub validate_write: bool,
    /// Validate execute access.
    pub validate_execute: bool,
    _reserved0: bool,
    pub set_page_table_bits: bool,
    pub tlb_flush_inhibit: bool,
    pub supervisor_access: bool,
    pub user_access: bool,
    pub pan_set: bool,
    pub pan_clear: bool,
    #[bits(46)]
    _unused: u64,
    // Top byte is a raw `HvInputVtl`; the impl below exposes typed accessors.
    #[bits(8)]
    input_vtl_value: u8,
}
1439
1440 impl TranslateGvaControlFlagsArm64 {
1441 pub fn input_vtl(&self) -> HvInputVtl {
1442 self.input_vtl_value().into()
1443 }
1444
1445 pub fn with_input_vtl(self, input_vtl: HvInputVtl) -> Self {
1446 self.with_input_vtl_value(input_vtl.into())
1447 }
1448
1449 pub fn set_input_vtl(&mut self, input_vtl: HvInputVtl) {
1450 self.set_input_vtl_value(input_vtl.into())
1451 }
1452 }
1453
/// Input for the translate-virtual-address hypercall (x64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressX64 {
    pub partition_id: u64,
    pub vp_index: u32,
    // Reserved padding.
    pub reserved: u32,
    pub control_flags: TranslateGvaControlFlagsX64,
    /// Guest virtual page number to translate.
    pub gva_page: u64,
}

/// Input for the translate-virtual-address hypercall (ARM64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressArm64 {
    pub partition_id: u64,
    pub vp_index: u32,
    // Reserved padding.
    pub reserved: u32,
    pub control_flags: TranslateGvaControlFlagsArm64,
    /// Guest virtual page number to translate.
    pub gva_page: u64,
}
1475
open_enum::open_enum! {
    // Result codes reported in `TranslateGvaResult::result_code`.
    pub enum TranslateGvaResultCode: u32 {
        SUCCESS = 0,

        // Failures walking the guest page tables.
        PAGE_NOT_PRESENT = 1,
        PRIVILEGE_VIOLATION = 2,
        INVALID_PAGE_TABLE_FLAGS = 3,

        // Failures at the guest physical address level.
        GPA_UNMAPPED = 4,
        GPA_NO_READ_ACCESS = 5,
        GPA_NO_WRITE_ACCESS = 6,
        GPA_ILLEGAL_OVERLAY_ACCESS = 7,

        INTERCEPT = 8,

        GPA_UNACCEPTED = 9,
    }
}
1499
/// Packed result of a virtual-address translation.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResult {
    /// A `TranslateGvaResultCode` value.
    pub result_code: u32,
    pub cache_type: u8,
    pub overlay_page: bool,
    #[bits(23)]
    pub reserved: u32,
}

/// Output for the translate-virtual-address hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressOutput {
    pub translation_result: TranslateGvaResult,
    /// Guest physical page number for the translated address.
    pub gpa_page: u64,
}
1516
/// Extended translation result (x64), including pending event information.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResultExX64 {
    pub result: TranslateGvaResult,
    pub reserved: u64,
    pub event_info: HvX64PendingEvent,
}

// ABI-fixed size: 8 (result) + 8 (reserved) + 0x20 (event info).
const_assert!(size_of::<TranslateGvaResultExX64>() == 0x30);

/// Extended translation result (ARM64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateGvaResultExArm64 {
    pub result: TranslateGvaResult,
}

const_assert!(size_of::<TranslateGvaResultExArm64>() == 0x8);
1534
/// Output for the extended translate-virtual-address hypercall (x64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressExOutputX64 {
    pub translation_result: TranslateGvaResultExX64,
    /// Guest physical page number for the translated address.
    pub gpa_page: u64,
    // Reserved padding to the ABI-fixed size.
    pub reserved: u64,
}

const_assert!(size_of::<TranslateVirtualAddressExOutputX64>() == 0x40);

/// Output for the extended translate-virtual-address hypercall (ARM64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct TranslateVirtualAddressExOutputArm64 {
    pub translation_result: TranslateGvaResultExArm64,
    /// Guest physical page number for the translated address.
    pub gpa_page: u64,
}

const_assert!(size_of::<TranslateVirtualAddressExOutputArm64>() == 0x10);
1554
/// Fixed input header for the get-VP-index-from-APIC-ID hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct GetVpIndexFromApicId {
    pub partition_id: u64,
    pub target_vtl: u8,
    // Reserved padding.
    pub reserved: [u8; 7],
}
1562
/// Input for the enable-VP-VTL hypercall (x64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnableVpVtlX64 {
    pub partition_id: u64,
    pub vp_index: u32,
    pub target_vtl: u8,
    // Reserved padding.
    pub reserved: [u8; 3],
    /// Initial register state for the newly enabled VTL.
    pub vp_vtl_context: InitialVpContextX64,
}

/// Input for the enable-VP-VTL hypercall (ARM64).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnableVpVtlArm64 {
    pub partition_id: u64,
    pub vp_index: u32,
    pub target_vtl: u8,
    // Reserved padding.
    pub reserved: [u8; 3],
    /// Initial register state for the newly enabled VTL.
    pub vp_vtl_context: InitialVpContextArm64,
}
1582
/// Input header for the modify-VTL-protection-mask hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifyVtlProtectionMask {
    pub partition_id: u64,
    /// Protections to apply.
    pub map_flags: HvMapGpaFlags,
    pub target_vtl: HvInputVtl,
    // Reserved padding.
    pub reserved: [u8; 3],
}
1591
/// Input header for the check-sparse-GPA-page-VTL-access hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct CheckSparseGpaPageVtlAccess {
    pub partition_id: u64,
    pub target_vtl: HvInputVtl,
    /// Access mask being checked.
    pub desired_access: u8,
    // Reserved padding to the ABI-fixed size.
    pub reserved0: u16,
    pub reserved1: u32,
}
const_assert!(size_of::<CheckSparseGpaPageVtlAccess>() == 0x10);
1602
/// Per-page output element for the check-sparse-GPA-page-VTL-access hypercall.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct CheckSparseGpaPageVtlAccessOutput {
    /// A `CheckGpaPageVtlAccessResultCode` value.
    pub result_code: u8,
    /// Access bits that were denied.
    pub denied_access: u8,
    #[bits(4)]
    pub intercepting_vtl: u32,
    #[bits(12)]
    _reserved0: u32,
    _reserved1: u32,
}
const_assert!(size_of::<CheckSparseGpaPageVtlAccessOutput>() == 0x8);
1615
open_enum::open_enum! {
    // Result codes for `CheckSparseGpaPageVtlAccessOutput::result_code`.
    pub enum CheckGpaPageVtlAccessResultCode: u32 {
        SUCCESS = 0,
        MEMORY_INTERCEPT = 1,
    }
}
1622
/// Number of entries in a [`VtlPermissionSet`].
pub const HV_VTL_PERMISSION_SET_SIZE: usize = 2;

/// Per-VTL permission values for VTLs above VTL 0.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VtlPermissionSet {
    /// Permissions for VTL 1 and up, indexed by (VTL - 1).
    pub vtl_permission_from_1: [u16; HV_VTL_PERMISSION_SET_SIZE],
}
1632
open_enum::open_enum! {
    // Memory type values for `AcceptPagesAttributes::memory_type`.
    pub enum AcceptMemoryType: u32 {
        ANY = 0,
        RAM = 1,
    }
}
1639
open_enum! {
    // Host visibility of guest memory pages.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HostVisibilityType: u8 {
        PRIVATE = 0,
        SHARED = 3,
    }
}
1653
// Conversion hooks required by the `bitfield` macro so this type can be used
// as a field type (see `AcceptPagesAttributes::host_visibility`).
impl HostVisibilityType {
    /// Reads the type out of raw bitfield bits.
    const fn from_bits(value: u8) -> Self {
        Self(value)
    }

    /// Stores the type back into raw bitfield bits.
    const fn into_bits(value: Self) -> u8 {
        value.0
    }
}
1664
/// Page attributes for the accept-GPA-pages hypercall.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AcceptPagesAttributes {
    /// An `AcceptMemoryType` value.
    #[bits(6)]
    pub memory_type: u32,
    /// Host visibility of the accepted pages.
    #[bits(2)]
    pub host_visibility: HostVisibilityType,
    /// VTL set for the accepted pages.
    #[bits(3)]
    pub vtl_set: u32,
    #[bits(21)]
    _reserved: u32,
}
1681
/// Input header for the accept-GPA-pages hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct AcceptGpaPages {
    pub partition_id: u64,
    pub page_attributes: AcceptPagesAttributes,
    /// Permissions applied per VTL (paired with `page_attributes.vtl_set`).
    pub vtl_permission_set: VtlPermissionSet,
    /// First guest physical page number to accept.
    pub gpa_page_base: u64,
}
const_assert!(size_of::<AcceptGpaPages>() == 0x18);
1696
1697 #[bitfield(u32)]
1699 pub struct UnacceptPagesAttributes {
1700 #[bits(3)]
1701 pub vtl_set: u32,
1702 #[bits(29)]
1703 _reserved: u32,
1704 }
1705
1706 #[repr(C)]
1707 pub struct UnacceptGpaPages {
1708 pub partition_id: u64,
1710 pub page_attributes: UnacceptPagesAttributes,
1712 pub vtl_permission_set: VtlPermissionSet,
1714 pub gpa_page_base: u64,
1716 }
1717 const_assert!(size_of::<UnacceptGpaPages>() == 0x18);
1718
/// Requested host visibility for the modify-sparse-page-visibility hypercall.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifyHostVisibility {
    #[bits(2)]
    pub host_visibility: HostVisibilityType,
    #[bits(30)]
    _reserved: u32,
}
1727
/// Input header for the modify-sparse-page-visibility hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct ModifySparsePageVisibility {
    pub partition_id: u64,
    pub host_visibility: ModifyHostVisibility,
    // Reserved padding.
    pub reserved: u32,
}

/// Input header for the query-sparse-page-visibility hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct QuerySparsePageVisibility {
    pub partition_id: u64,
}
1741
/// Size in bytes of caller-supplied report data for a VBS VM report.
pub const VBS_VM_REPORT_DATA_SIZE: usize = 64;
/// Maximum size in bytes of a generated VBS VM report.
pub const VBS_VM_MAX_REPORT_SIZE: usize = 2048;

/// Input for requesting a VBS VM report.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VbsVmCallReport {
    /// Caller-provided data included in the report.
    pub report_data: [u8; VBS_VM_REPORT_DATA_SIZE],
}

/// Output buffer holding a generated VBS VM report.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct VbsVmCallReportOutput {
    pub report: [u8; VBS_VM_MAX_REPORT_SIZE],
}
1756
/// Flags controlling how a partition VTL is enabled.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnablePartitionVtlFlags {
    /// Enable mode-based execution control (MBEC).
    pub enable_mbec: bool,
    pub enable_supervisor_shadow_stack: bool,
    pub enable_hardware_hvpt: bool,
    #[bits(5)]
    pub reserved: u8,
}

/// Input for the enable-partition-VTL hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct EnablePartitionVtl {
    pub partition_id: u64,
    pub target_vtl: u8,
    pub flags: EnablePartitionVtlFlags,
    // Reserved; zero padding.
    pub reserved_z0: u16,
    pub reserved_z1: u32,
}
1776
/// Input header for the flush-virtual-address-space hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct FlushVirtualAddressSpace {
    pub address_space: u64,
    pub flags: HvFlushFlags,
    /// Mask of target processors.
    pub processor_mask: u64,
}

/// Input header for the extended flush hypercall, which targets a sparse VP
/// set instead of a plain processor mask.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct FlushVirtualAddressSpaceEx {
    pub address_space: u64,
    pub flags: HvFlushFlags,
    pub vp_set_format: u64,
    pub vp_set_valid_banks_mask: u64,
}
1794
/// Fixed input header for the pin/unpin-GPA-page-ranges hypercalls; contains
/// only a reserved field.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct PinUnpinGpaPageRangesHeader {
    pub reserved: u64,
}
1800
/// Input for the send-synthetic-cluster-IPI hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct SendSyntheticClusterIpi {
    pub vector: u32,
    pub target_vtl: HvInputVtl,
    pub flags: u8,
    // Reserved padding.
    pub reserved: u16,
    /// Mask of target processors.
    pub processor_mask: u64,
}

/// Input header for the extended cluster IPI hypercall, which targets a
/// sparse VP set instead of a plain processor mask.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct SendSyntheticClusterIpiEx {
    pub vector: u32,
    pub target_vtl: HvInputVtl,
    pub flags: u8,
    // Reserved padding.
    pub reserved: u16,
    pub vp_set_format: u64,
    pub vp_set_valid_banks_mask: u64,
}
1822
/// Flags for the flush-virtual-address-space family of hypercalls.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvFlushFlags {
    /// Flush on all processors, ignoring the processor mask / VP set.
    pub all_processors: bool,
    /// Flush all address spaces, ignoring `address_space`.
    pub all_virtual_address_spaces: bool,
    pub non_global_mappings_only: bool,
    /// The GVA list uses the extended range format.
    pub use_extended_range_format: bool,
    pub use_target_vtl: bool,

    #[bits(3)]
    _reserved: u8,

    // Target VTL bits, honored when `use_target_vtl` is set.
    pub target_vtl0: bool,
    pub target_vtl1: bool,

    #[bits(54)]
    _reserved2: u64,
}
1841
/// Raw 64-bit encoding of a guest virtual address range; see the `as_*`
/// methods for the format-specific views.
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
#[repr(transparent)]
pub struct HvGvaRange(pub u64);

impl From<u64> for HvGvaRange {
    fn from(value: u64) -> Self {
        Self(value)
    }
}

impl From<HvGvaRange> for u64 {
    fn from(value: HvGvaRange) -> Self {
        value.0
    }
}
1857
1858 impl HvGvaRange {
1859 pub fn as_simple(self) -> HvGvaRangeSimple {
1860 HvGvaRangeSimple(self.0)
1861 }
1862
1863 pub fn as_extended(self) -> HvGvaRangeExtended {
1864 HvGvaRangeExtended(self.0)
1865 }
1866
1867 pub fn as_extended_large_page(self) -> HvGvaRangeExtendedLargePage {
1868 HvGvaRangeExtendedLargePage(self.0)
1869 }
1870 }
1871
/// Simple GVA range: 12-bit page count, 52-bit page number.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeSimple {
    /// Number of pages beyond the first.
    #[bits(12)]
    pub additional_pages: u64,
    /// Guest virtual page number.
    #[bits(52)]
    pub gva_page_number: u64,
}

/// Extended GVA range: 11-bit page count plus a large-page flag.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeExtended {
    /// Number of pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// When set, interpret via [`HvGvaRangeExtendedLargePage`] instead.
    pub large_page: bool,
    /// Guest virtual page number.
    #[bits(52)]
    pub gva_page_number: u64,
}

/// Extended GVA range with `large_page` set.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGvaRangeExtendedLargePage {
    /// Number of pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Always set in this format.
    pub large_page: bool,
    /// Selects the large page size.
    pub page_size: bool,
    #[bits(8)]
    _reserved: u64,
    /// Guest virtual large-page number.
    #[bits(43)]
    pub gva_large_page_number: u64,
}
1914
/// Raw 64-bit encoding of a guest physical address range; see the `as_*`
/// methods for the format-specific views.
#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
#[repr(transparent)]
pub struct HvGpaRange(pub u64);
1918
1919 impl HvGpaRange {
1920 pub fn as_simple(self) -> HvGpaRangeSimple {
1921 HvGpaRangeSimple(self.0)
1922 }
1923
1924 pub fn as_extended(self) -> HvGpaRangeExtended {
1925 HvGpaRangeExtended(self.0)
1926 }
1927
1928 pub fn as_extended_large_page(self) -> HvGpaRangeExtendedLargePage {
1929 HvGpaRangeExtendedLargePage(self.0)
1930 }
1931 }
1932
/// Simple GPA range: 12-bit page count, 52-bit page number.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeSimple {
    /// Number of pages beyond the first.
    #[bits(12)]
    pub additional_pages: u64,
    /// Guest physical page number.
    #[bits(52)]
    pub gpa_page_number: u64,
}

/// Extended GPA range: 11-bit page count plus a large-page flag.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeExtended {
    /// Number of pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// When set, interpret via [`HvGpaRangeExtendedLargePage`] instead.
    pub large_page: bool,
    /// Guest physical page number.
    #[bits(52)]
    pub gpa_page_number: u64,
}

/// Extended GPA range with `large_page` set.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvGpaRangeExtendedLargePage {
    /// Number of pages beyond the first.
    #[bits(11)]
    pub additional_pages: u64,
    /// Always set in this format.
    pub large_page: bool,
    /// Selects the large page size.
    pub page_size: bool,
    #[bits(8)]
    _reserved: u64,
    /// Guest physical large-page number.
    #[bits(43)]
    pub gpa_large_page_number: u64,
}
1975
/// Maximum data payload in bytes for the MMIO read/write hypercalls.
pub const HV_HYPERCALL_MMIO_MAX_DATA_LENGTH: usize = 64;

/// Input for the MMIO-read hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoRead {
    pub gpa: u64,
    /// Number of bytes to read.
    pub access_width: u32,
    // Reserved; zero padding.
    pub reserved_z0: u32,
}

/// Output buffer for the MMIO-read hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoReadOutput {
    pub data: [u8; HV_HYPERCALL_MMIO_MAX_DATA_LENGTH],
}
1991
/// Input for the MMIO-write hypercall.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct MemoryMappedIoWrite {
    pub gpa: u64,
    /// Number of bytes of `data` to write.
    pub access_width: u32,
    // Reserved; zero padding.
    pub reserved_z0: u32,
    pub data: [u8; HV_HYPERCALL_MMIO_MAX_DATA_LENGTH],
}
2000}
2001
/// Defines a `u32` register-name open enum that includes the listed
/// arch-specific variants plus the common (arch-independent) register values,
/// and generates conversions to and from [`HvRegisterName`].
macro_rules! registers {
    ($name:ident {
        $(
            $(#[$vattr:meta])*
            $variant:ident = $value:expr
        ),*
        $(,)?
    }) => {
        open_enum! {
            #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
            pub enum $name: u32 {
                #![expect(non_upper_case_globals)]
                $($variant = $value,)*
                // Common registers, present in every generated enum.
                InstructionEmulationHints = 0x00000002,
                InternalActivityState = 0x00000004,

                // Guest crash registers.
                GuestCrashP0 = 0x00000210,
                GuestCrashP1 = 0x00000211,
                GuestCrashP2 = 0x00000212,
                GuestCrashP3 = 0x00000213,
                GuestCrashP4 = 0x00000214,
                GuestCrashCtl = 0x00000215,

                // Interrupt/event state registers.
                PendingInterruption = 0x00010002,
                InterruptState = 0x00010003,
                PendingEvent0 = 0x00010004,
                PendingEvent1 = 0x00010005,
                DeliverabilityNotifications = 0x00010006,

                GicrBaseGpa = 0x00063000,

                // Partition-wide and per-VP hypervisor registers.
                VpRuntime = 0x00090000,
                GuestOsId = 0x00090002,
                VpIndex = 0x00090003,
                TimeRefCount = 0x00090004,
                CpuManagementVersion = 0x00090007,
                VpAssistPage = 0x00090013,
                VpRootSignalCount = 0x00090014,
                ReferenceTsc = 0x00090017,
                VpConfig = 0x00090018,
                Ghcb = 0x00090019,
                ReferenceTscSequence = 0x0009001A,
                GuestSchedulerEvent = 0x0009001B,

                // Synthetic interrupt controller (SynIC) registers.
                Sint0 = 0x000A0000,
                Sint1 = 0x000A0001,
                Sint2 = 0x000A0002,
                Sint3 = 0x000A0003,
                Sint4 = 0x000A0004,
                Sint5 = 0x000A0005,
                Sint6 = 0x000A0006,
                Sint7 = 0x000A0007,
                Sint8 = 0x000A0008,
                Sint9 = 0x000A0009,
                Sint10 = 0x000A000A,
                Sint11 = 0x000A000B,
                Sint12 = 0x000A000C,
                Sint13 = 0x000A000D,
                Sint14 = 0x000A000E,
                Sint15 = 0x000A000F,
                Scontrol = 0x000A0010,
                Sversion = 0x000A0011,
                Sifp = 0x000A0012,
                Sipp = 0x000A0013,
                Eom = 0x000A0014,
                Sirbp = 0x000A0015,

                // Synthetic timer registers.
                Stimer0Config = 0x000B0000,
                Stimer0Count = 0x000B0001,
                Stimer1Config = 0x000B0002,
                Stimer1Count = 0x000B0003,
                Stimer2Config = 0x000B0004,
                Stimer2Count = 0x000B0005,
                Stimer3Config = 0x000B0006,
                Stimer3Count = 0x000B0007,
                StimeUnhaltedTimerConfig = 0x000B0100,
                StimeUnhaltedTimerCount = 0x000B0101,

                // Virtual Secure Mode (VSM) registers.
                VsmCodePageOffsets = 0x000D0002,
                VsmVpStatus = 0x000D0003,
                VsmPartitionStatus = 0x000D0004,
                VsmVina = 0x000D0005,
                VsmCapabilities = 0x000D0006,
                VsmPartitionConfig = 0x000D0007,
                GuestVsmPartitionConfig = 0x000D0008,
                VsmVpSecureConfigVtl0 = 0x000D0010,
                VsmVpSecureConfigVtl1 = 0x000D0011,
                VsmVpSecureConfigVtl2 = 0x000D0012,
                VsmVpSecureConfigVtl3 = 0x000D0013,
                VsmVpSecureConfigVtl4 = 0x000D0014,
                VsmVpSecureConfigVtl5 = 0x000D0015,
                VsmVpSecureConfigVtl6 = 0x000D0016,
                VsmVpSecureConfigVtl7 = 0x000D0017,
                VsmVpSecureConfigVtl8 = 0x000D0018,
                VsmVpSecureConfigVtl9 = 0x000D0019,
                VsmVpSecureConfigVtl10 = 0x000D001A,
                VsmVpSecureConfigVtl11 = 0x000D001B,
                VsmVpSecureConfigVtl12 = 0x000D001C,
                VsmVpSecureConfigVtl13 = 0x000D001D,
                VsmVpSecureConfigVtl14 = 0x000D001E,
                VsmVpWaitForTlbLock = 0x000D0020,
            }
        }

        impl From<HvRegisterName> for $name {
            fn from(name: HvRegisterName) -> Self {
                Self(name.0)
            }
        }

        impl From<$name> for HvRegisterName {
            fn from(name: $name) -> Self {
                Self(name.0)
            }
        }
    };
}
2120
/// Untyped hypervisor register identifier; the arch-specific enums
/// ([`HvX64RegisterName`], [`HvArm64RegisterName`]) convert to and from this.
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterName(pub u32);
2128
// Register names common to all architectures: only the shared values supplied
// by the `registers!` macro body.
registers! {
    HvAllArchRegisterName {}
}

impl From<HvAllArchRegisterName> for HvX64RegisterName {
    fn from(name: HvAllArchRegisterName) -> Self {
        Self(name.0)
    }
}

impl From<HvAllArchRegisterName> for HvArm64RegisterName {
    fn from(name: HvAllArchRegisterName) -> Self {
        Self(name.0)
    }
}
2145
// x64 register names (plus the common values from the `registers!` macro).
registers! {
    HvX64RegisterName {
        // General-purpose registers.
        Rax = 0x00020000,
        Rcx = 0x00020001,
        Rdx = 0x00020002,
        Rbx = 0x00020003,
        Rsp = 0x00020004,
        Rbp = 0x00020005,
        Rsi = 0x00020006,
        Rdi = 0x00020007,
        R8 = 0x00020008,
        R9 = 0x00020009,
        R10 = 0x0002000a,
        R11 = 0x0002000b,
        R12 = 0x0002000c,
        R13 = 0x0002000d,
        R14 = 0x0002000e,
        R15 = 0x0002000f,
        Rip = 0x00020010,
        Rflags = 0x00020011,

        // Floating-point and vector registers.
        Xmm0 = 0x00030000,
        Xmm1 = 0x00030001,
        Xmm2 = 0x00030002,
        Xmm3 = 0x00030003,
        Xmm4 = 0x00030004,
        Xmm5 = 0x00030005,
        Xmm6 = 0x00030006,
        Xmm7 = 0x00030007,
        Xmm8 = 0x00030008,
        Xmm9 = 0x00030009,
        Xmm10 = 0x0003000A,
        Xmm11 = 0x0003000B,
        Xmm12 = 0x0003000C,
        Xmm13 = 0x0003000D,
        Xmm14 = 0x0003000E,
        Xmm15 = 0x0003000F,
        FpMmx0 = 0x00030010,
        FpMmx1 = 0x00030011,
        FpMmx2 = 0x00030012,
        FpMmx3 = 0x00030013,
        FpMmx4 = 0x00030014,
        FpMmx5 = 0x00030015,
        FpMmx6 = 0x00030016,
        FpMmx7 = 0x00030017,
        FpControlStatus = 0x00030018,
        XmmControlStatus = 0x00030019,

        // Control registers.
        Cr0 = 0x00040000,
        Cr2 = 0x00040001,
        Cr3 = 0x00040002,
        Cr4 = 0x00040003,
        Cr8 = 0x00040004,
        Xfem = 0x00040005,
        IntermediateCr0 = 0x00041000,
        IntermediateCr3 = 0x00041002,
        IntermediateCr4 = 0x00041003,
        IntermediateCr8 = 0x00041004,
        // Debug registers.
        Dr0 = 0x00050000,
        Dr1 = 0x00050001,
        Dr2 = 0x00050002,
        Dr3 = 0x00050003,
        Dr6 = 0x00050004,
        Dr7 = 0x00050005,
        // Segment and descriptor-table registers.
        Es = 0x00060000,
        Cs = 0x00060001,
        Ss = 0x00060002,
        Ds = 0x00060003,
        Fs = 0x00060004,
        Gs = 0x00060005,
        Ldtr = 0x00060006,
        Tr = 0x00060007,
        Idtr = 0x00070000,
        Gdtr = 0x00070001,
        // MSRs.
        Tsc = 0x00080000,
        Efer = 0x00080001,
        KernelGsBase = 0x00080002,
        ApicBase = 0x00080003,
        Pat = 0x00080004,
        SysenterCs = 0x00080005,
        SysenterEip = 0x00080006,
        SysenterEsp = 0x00080007,
        Star = 0x00080008,
        Lstar = 0x00080009,
        Cstar = 0x0008000a,
        Sfmask = 0x0008000b,
        InitialApicId = 0x0008000c,
        // MTRR MSRs.
        MsrMtrrCap = 0x0008000d,
        MsrMtrrDefType = 0x0008000e,
        MsrMtrrPhysBase0 = 0x00080010,
        MsrMtrrPhysBase1 = 0x00080011,
        MsrMtrrPhysBase2 = 0x00080012,
        MsrMtrrPhysBase3 = 0x00080013,
        MsrMtrrPhysBase4 = 0x00080014,
        MsrMtrrPhysBase5 = 0x00080015,
        MsrMtrrPhysBase6 = 0x00080016,
        MsrMtrrPhysBase7 = 0x00080017,
        MsrMtrrPhysBase8 = 0x00080018,
        MsrMtrrPhysBase9 = 0x00080019,
        MsrMtrrPhysBaseA = 0x0008001a,
        MsrMtrrPhysBaseB = 0x0008001b,
        MsrMtrrPhysBaseC = 0x0008001c,
        MsrMtrrPhysBaseD = 0x0008001d,
        MsrMtrrPhysBaseE = 0x0008001e,
        MsrMtrrPhysBaseF = 0x0008001f,
        MsrMtrrPhysMask0 = 0x00080040,
        MsrMtrrPhysMask1 = 0x00080041,
        MsrMtrrPhysMask2 = 0x00080042,
        MsrMtrrPhysMask3 = 0x00080043,
        MsrMtrrPhysMask4 = 0x00080044,
        MsrMtrrPhysMask5 = 0x00080045,
        MsrMtrrPhysMask6 = 0x00080046,
        MsrMtrrPhysMask7 = 0x00080047,
        MsrMtrrPhysMask8 = 0x00080048,
        MsrMtrrPhysMask9 = 0x00080049,
        MsrMtrrPhysMaskA = 0x0008004a,
        MsrMtrrPhysMaskB = 0x0008004b,
        MsrMtrrPhysMaskC = 0x0008004c,
        MsrMtrrPhysMaskD = 0x0008004d,
        MsrMtrrPhysMaskE = 0x0008004e,
        MsrMtrrPhysMaskF = 0x0008004f,
        MsrMtrrFix64k00000 = 0x00080070,
        MsrMtrrFix16k80000 = 0x00080071,
        MsrMtrrFix16kA0000 = 0x00080072,
        MsrMtrrFix4kC0000 = 0x00080073,
        MsrMtrrFix4kC8000 = 0x00080074,
        MsrMtrrFix4kD0000 = 0x00080075,
        MsrMtrrFix4kD8000 = 0x00080076,
        MsrMtrrFix4kE0000 = 0x00080077,
        MsrMtrrFix4kE8000 = 0x00080078,
        MsrMtrrFix4kF0000 = 0x00080079,
        MsrMtrrFix4kF8000 = 0x0008007a,

        TscAux = 0x0008007B,
        Bndcfgs = 0x0008007C,
        DebugCtl = 0x0008007D,
        MCount = 0x0008007E,
        ACount = 0x0008007F,

        SgxLaunchControl0 = 0x00080080,
        SgxLaunchControl1 = 0x00080081,
        SgxLaunchControl2 = 0x00080082,
        SgxLaunchControl3 = 0x00080083,
        SpecCtrl = 0x00080084,
        PredCmd = 0x00080085,
        VirtSpecCtrl = 0x00080086,
        TscVirtualOffset = 0x00080087,
        TsxCtrl = 0x00080088,
        MsrMcUpdatePatchLevel = 0x00080089,
        Available1 = 0x0008008A,
        Xss = 0x0008008B,
        UCet = 0x0008008C,
        SCet = 0x0008008D,
        Ssp = 0x0008008E,
        Pl0Ssp = 0x0008008F,
        Pl1Ssp = 0x00080090,
        Pl2Ssp = 0x00080091,
        Pl3Ssp = 0x00080092,
        InterruptSspTableAddr = 0x00080093,
        TscVirtualMultiplier = 0x00080094,
        TscDeadline = 0x00080095,
        TscAdjust = 0x00080096,
        Pasid = 0x00080097,
        UmwaitControl = 0x00080098,
        Xfd = 0x00080099,
        XfdErr = 0x0008009A,

        // Hypervisor overlay/assist registers.
        Hypercall = 0x00090001,
        RegisterPage = 0x0009001C,

        // Emulated timer registers.
        EmulatedTimerPeriod = 0x00090030,
        EmulatedTimerControl = 0x00090031,
        PmTimerAssist = 0x00090032,

        SevControl = 0x00090040,

        // Control-register intercept configuration.
        CrInterceptControl = 0x000E0000,
        CrInterceptCr0Mask = 0x000E0001,
        CrInterceptCr4Mask = 0x000E0002,
        CrInterceptIa32MiscEnableMask = 0x000E0003,
    }
}
2339
// ARM64 register names (plus the common values from the `registers!` macro).
registers! {
    HvArm64RegisterName {
        HypervisorVersion = 0x00000100,
        // Hypervisor capability/feature information pseudo-registers.
        PrivilegesAndFeaturesInfo = 0x00000200,
        FeaturesInfo = 0x00000201,
        ImplementationLimitsInfo = 0x00000202,
        HardwareFeaturesInfo = 0x00000203,
        CpuManagementFeaturesInfo = 0x00000204,
        PasidFeaturesInfo = 0x00000205,
        SkipLevelFeaturesInfo = 0x00000206,
        NestedVirtFeaturesInfo = 0x00000207,
        IptFeaturesInfo = 0x00000208,
        IsolationConfiguration = 0x00000209,

        // General-purpose registers.
        X0 = 0x00020000,
        X1 = 0x00020001,
        X2 = 0x00020002,
        X3 = 0x00020003,
        X4 = 0x00020004,
        X5 = 0x00020005,
        X6 = 0x00020006,
        X7 = 0x00020007,
        X8 = 0x00020008,
        X9 = 0x00020009,
        X10 = 0x0002000A,
        X11 = 0x0002000B,
        X12 = 0x0002000C,
        X13 = 0x0002000D,
        X14 = 0x0002000E,
        X15 = 0x0002000F,
        X16 = 0x00020010,
        X17 = 0x00020011,
        X18 = 0x00020012,
        X19 = 0x00020013,
        X20 = 0x00020014,
        X21 = 0x00020015,
        X22 = 0x00020016,
        X23 = 0x00020017,
        X24 = 0x00020018,
        X25 = 0x00020019,
        X26 = 0x0002001A,
        X27 = 0x0002001B,
        X28 = 0x0002001C,
        XFp = 0x0002001D,
        XLr = 0x0002001E,
        XSp = 0x0002001F,
        XSpEl0 = 0x00020020,
        XSpElx = 0x00020021,
        XPc = 0x00020022,
        Cpsr = 0x00020023,
        SpsrEl2 = 0x00021002,

        // EL1 system registers.
        SctlrEl1 = 0x00040002,
        Ttbr0El1 = 0x00040005,
        Ttbr1El1 = 0x00040006,
        TcrEl1 = 0x00040007,
        EsrEl1 = 0x00040008,
        FarEl1 = 0x00040009,
        MairEl1 = 0x0004000b,
        VbarEl1 = 0x0004000c,
        ElrEl1 = 0x00040015,
    }
}
2403
/// A 128-bit hypervisor register value; narrower registers occupy the low
/// bytes. Use the `as_*` accessors to extract typed views.
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterValue(pub AlignedU128);
2407
impl HvRegisterValue {
    /// Returns the full 128-bit value.
    pub fn as_u128(&self) -> u128 {
        self.0.into()
    }

    /// Returns the value truncated to the low 64 bits.
    pub fn as_u64(&self) -> u64 {
        self.as_u128() as u64
    }

    /// Returns the value truncated to the low 32 bits.
    pub fn as_u32(&self) -> u32 {
        self.as_u128() as u32
    }

    /// Returns the value truncated to the low 16 bits.
    pub fn as_u16(&self) -> u16 {
        self.as_u128() as u16
    }

    /// Returns the value truncated to the low 8 bits.
    pub fn as_u8(&self) -> u8 {
        self.as_u128() as u8
    }

    /// Reinterprets the bytes as an x64 table register (IDTR/GDTR).
    pub fn as_table(&self) -> HvX64TableRegister {
        // The 16-byte value covers the table register, so the prefix read
        // cannot fail.
        HvX64TableRegister::read_from_prefix(self.as_bytes())
            .unwrap()
            .0
    }

    /// Reinterprets the bytes as an x64 segment register.
    pub fn as_segment(&self) -> HvX64SegmentRegister {
        // The 16-byte value covers the segment register, so the prefix read
        // cannot fail.
        HvX64SegmentRegister::read_from_prefix(self.as_bytes())
            .unwrap()
            .0
    }
}
2441
2442impl From<u8> for HvRegisterValue {
2443 fn from(val: u8) -> Self {
2444 (val as u128).into()
2445 }
2446}
2447
2448impl From<u16> for HvRegisterValue {
2449 fn from(val: u16) -> Self {
2450 (val as u128).into()
2451 }
2452}
2453
2454impl From<u32> for HvRegisterValue {
2455 fn from(val: u32) -> Self {
2456 (val as u128).into()
2457 }
2458}
2459
2460impl From<u64> for HvRegisterValue {
2461 fn from(val: u64) -> Self {
2462 (val as u128).into()
2463 }
2464}
2465
2466impl From<u128> for HvRegisterValue {
2467 fn from(val: u128) -> Self {
2468 Self(val.into())
2469 }
2470}
2471
/// An x64 descriptor-table register (GDTR/IDTR) in the hypervisor's register
/// layout.
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64TableRegister {
    pub pad: [u16; 3],
    /// Table limit (size in bytes minus one).
    pub limit: u16,
    /// Linear base address of the table.
    pub base: u64,
}
2479
2480impl From<HvX64TableRegister> for HvRegisterValue {
2481 fn from(val: HvX64TableRegister) -> Self {
2482 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2484}
2485
2486impl From<HvRegisterValue> for HvX64TableRegister {
2487 fn from(val: HvRegisterValue) -> Self {
2488 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2490}
2491
/// An x64 segment register in the hypervisor's register layout.
#[repr(C)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64SegmentRegister {
    /// Segment base address.
    pub base: u64,
    /// Segment limit.
    pub limit: u32,
    /// Segment selector.
    pub selector: u16,
    /// Packed segment attributes (type, DPL, present, etc.).
    pub attributes: u16,
}
2500
2501impl From<HvX64SegmentRegister> for HvRegisterValue {
2502 fn from(val: HvX64SegmentRegister) -> Self {
2503 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2505}
2506
2507impl From<HvRegisterValue> for HvX64SegmentRegister {
2508 fn from(val: HvRegisterValue) -> Self {
2509 Self::read_from_prefix(val.as_bytes()).unwrap().0 }
2511}
2512
/// Register requesting notifications when interrupts/events become
/// deliverable.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes, PartialEq, Eq)]
pub struct HvDeliverabilityNotificationsRegister {
    /// Notify when an NMI becomes deliverable.
    pub nmi_notification: bool,
    /// Notify when an interrupt becomes deliverable.
    pub interrupt_notification: bool,
    /// Minimum priority of interrupt to notify for (only meaningful when
    /// `interrupt_notification` is set).
    #[bits(4)]
    pub interrupt_priority: u8,
    #[bits(42)]
    pub reserved: u64,
    /// Bitmask of SINTs to notify for when they become deliverable.
    pub sints: u16,
}

open_enum! {
    /// The reason a VTL was entered.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvVtlEntryReason: u32 {
        RESERVED = 0,

        /// Entry via an explicit VTL call from a lower VTL.
        VTL_CALL = 1,

        /// Entry due to an interrupt targeted at this VTL.
        INTERRUPT = 2,

        /// Entry due to an intercept delivered via the intercept page.
        INTERCEPT = 3,
    }
}

/// The VTL control portion of the VP assist page.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpVtlControl {
    /// Why the current VTL was entered; filled in by the hypervisor on entry.
    pub entry_reason: HvVtlEntryReason,

    /// Virtual interrupt notification assist status.
    pub vina_status: u8,
    pub reserved_z0: u8,
    pub reserved_z1: u16,

    /// Registers saved/restored across VTL transitions
    /// (rax/rcx on x64, x0/x1 on ARM64).
    pub registers: [u64; 2],
}
2566
/// The HvRegisterVsmVina register: virtual interrupt notification assist
/// configuration.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterVsmVina {
    /// Vector delivered when a VINA is asserted.
    pub vector: u8,
    pub enabled: bool,
    pub auto_reset: bool,
    pub auto_eoi: bool,
    #[bits(53)]
    pub reserved: u64,
}

/// The layout of the VP assist page.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpAssistPage {
    /// APIC assist for optimized EOI processing.
    pub apic_assist: u32,
    pub reserved_z0: u32,

    /// VTL transition control, filled in on VTL entry.
    pub vtl_control: HvVpVtlControl,

    pub nested_enlightenments_control: u64,
    pub enlighten_vm_entry: u8,
    pub reserved_z1: [u8; 7],
    pub current_nested_vmcs: u64,
    pub synthetic_time_unhalted_timer_expired: u8,
    pub reserved_z2: [u8; 7],
    pub virtualization_fault_information: [u8; 40],
    pub reserved_z3: u64,
    /// The intercept message delivered through the intercept page mechanism.
    pub intercept_message: HvMessage,
    /// Action list processed by the hypervisor on VTL return.
    pub vtl_return_actions: [u8; 256],
}

/// A "signal event" entry in the VP assist page's VTL return action list.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvVpAssistPageActionSignalEvent {
    pub action_type: u64,
    pub target_vp: u32,
    pub target_vtl: u8,
    pub target_sint: u8,
    pub flag_number: u16,
}
2609
open_enum! {
    /// The type of access that triggered an intercept.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvInterceptAccessType: u8 {
        READ = 0,
        WRITE = 1,
        EXECUTE = 2,
    }
}

/// x64 VP execution state reported in intercept message headers.
#[bitfield(u16)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VpExecutionState {
    /// Current privilege level.
    #[bits(2)]
    pub cpl: u8,
    pub cr0_pe: bool,
    pub cr0_am: bool,
    pub efer_lma: bool,
    pub debug_active: bool,
    pub interruption_pending: bool,
    /// VTL the processor was running in at intercept time.
    #[bits(4)]
    pub vtl: u8,
    pub enclave_mode: bool,
    pub interrupt_shadow: bool,
    pub virtualization_fault_active: bool,
    #[bits(2)]
    pub reserved: u8,
}

/// ARM64 VP execution state reported in intercept message headers.
#[bitfield(u16)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64VpExecutionState {
    /// Current privilege level.
    #[bits(2)]
    pub cpl: u8,
    pub debug_active: bool,
    pub interruption_pending: bool,
    /// VTL the processor was running in at intercept time.
    #[bits(4)]
    pub vtl: u8,
    pub virtualization_fault_active: bool,
    #[bits(7)]
    pub reserved: u8,
}
2651
/// Header common to all x64 intercept messages.
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterceptMessageHeader {
    /// Index of the VP that took the intercept.
    pub vp_index: u32,
    /// Packed field: low nibble is instruction length, high nibble is CR8.
    /// Use [`Self::instruction_len`] and [`Self::cr8`] to decode.
    pub instruction_length_and_cr8: u8,
    pub intercept_access_type: HvInterceptAccessType,
    pub execution_state: HvX64VpExecutionState,
    pub cs_segment: HvX64SegmentRegister,
    pub rip: u64,
    pub rflags: u64,
}

impl MessagePayload for HvX64InterceptMessageHeader {}
2665
2666impl HvX64InterceptMessageHeader {
2667 pub fn instruction_len(&self) -> u8 {
2668 self.instruction_length_and_cr8 & 0xf
2669 }
2670
2671 pub fn cr8(&self) -> u8 {
2672 self.instruction_length_and_cr8 >> 4
2673 }
2674}
2675
/// Header common to all ARM64 intercept messages.
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64InterceptMessageHeader {
    /// Index of the VP that took the intercept.
    pub vp_index: u32,
    pub instruction_length: u8,
    pub intercept_access_type: HvInterceptAccessType,
    pub execution_state: HvArm64VpExecutionState,
    /// Program counter at intercept time.
    pub pc: u64,
    // NOTE(review): `cspr` looks like a transposition of CPSR (current program
    // status register) — confirm; renaming would break existing callers, so it
    // is only flagged here.
    pub cspr: u64,
}
const_assert!(size_of::<HvArm64InterceptMessageHeader>() == 0x18);

impl MessagePayload for HvArm64InterceptMessageHeader {}
2689
2690#[repr(transparent)]
2691#[derive(Debug, Copy, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
2692pub struct HvX64IoPortAccessInfo(pub u8);
2693
2694impl HvX64IoPortAccessInfo {
2695 pub fn new(access_size: u8, string_op: bool, rep_prefix: bool) -> Self {
2696 let mut info = access_size & 0x7;
2697
2698 if string_op {
2699 info |= 0x8;
2700 }
2701
2702 if rep_prefix {
2703 info |= 0x10;
2704 }
2705
2706 Self(info)
2707 }
2708
2709 pub fn access_size(&self) -> u8 {
2710 self.0 & 0x7
2711 }
2712
2713 pub fn string_op(&self) -> bool {
2714 self.0 & 0x8 != 0
2715 }
2716
2717 pub fn rep_prefix(&self) -> bool {
2718 self.0 & 0x10 != 0
2719 }
2720}
2721
/// Intercept message for an I/O port access.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64IoPortInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    /// The port being accessed.
    pub port_number: u16,
    pub access_info: HvX64IoPortAccessInfo,
    /// Number of valid bytes in `instruction_bytes`.
    pub instruction_byte_count: u8,
    pub reserved: u32,
    pub rax: u64,
    pub instruction_bytes: [u8; 16],
    /// DS/ES and rcx/rsi/rdi are provided for emulating string operations.
    pub ds_segment: HvX64SegmentRegister,
    pub es_segment: HvX64SegmentRegister,
    pub rcx: u64,
    pub rsi: u64,
    pub rdi: u64,
}

impl MessagePayload for HvX64IoPortInterceptMessage {}
2740
/// Access information accompanying an x64 memory intercept.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MemoryAccessInfo {
    /// The guest virtual address field is valid.
    pub gva_valid: bool,
    /// The GVA-to-GPA translation is valid.
    pub gva_gpa_valid: bool,
    pub hypercall_output_pending: bool,
    pub tlb_locked: bool,
    pub supervisor_shadow_stack: bool,
    #[bits(3)]
    pub reserved1: u8,
}

/// Access information accompanying an ARM64 memory intercept.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64MemoryAccessInfo {
    /// The guest virtual address field is valid.
    pub gva_valid: bool,
    /// The GVA-to-GPA translation is valid.
    pub gva_gpa_valid: bool,
    pub hypercall_output_pending: bool,
    #[bits(5)]
    pub reserved1: u8,
}

open_enum! {
    /// Memory cache type, using x86 MTRR-style encodings.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvCacheType: u32 {
        #![expect(non_upper_case_globals)]
        HvCacheTypeUncached = 0,
        HvCacheTypeWriteCombining = 1,
        HvCacheTypeWriteThrough = 4,
        HvCacheTypeWriteProtected = 5,
        HvCacheTypeWriteBack = 6,
    }
}
2774
/// Intercept message for an x64 memory (GPA) access.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MemoryInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub cache_type: HvCacheType,
    /// Number of valid bytes in `instruction_bytes`.
    pub instruction_byte_count: u8,
    pub memory_access_info: HvX64MemoryAccessInfo,
    pub tpr_priority: u8,
    pub reserved: u8,
    /// Only valid when `memory_access_info.gva_valid()`.
    pub guest_virtual_address: u64,
    pub guest_physical_address: u64,
    pub instruction_bytes: [u8; 16],
}

impl MessagePayload for HvX64MemoryInterceptMessage {}
const_assert!(size_of::<HvX64MemoryInterceptMessage>() == 0x50);

/// Intercept message for an ARM64 memory (GPA) access.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64MemoryInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub cache_type: HvCacheType,
    /// Number of valid bytes in `instruction_bytes`.
    pub instruction_byte_count: u8,
    pub memory_access_info: HvArm64MemoryAccessInfo,
    pub reserved1: u16,
    pub instruction_bytes: [u8; 4],
    pub reserved2: u32,
    /// Only valid when `memory_access_info.gva_valid()`.
    pub guest_virtual_address: u64,
    pub guest_physical_address: u64,
    /// ESR-style fault syndrome for the access.
    pub syndrome: u64,
}

impl MessagePayload for HvArm64MemoryInterceptMessage {}
const_assert!(size_of::<HvArm64MemoryInterceptMessage>() == 0x40);
2809
/// Intercept message for an ARM64 MMIO access.
#[repr(C)]
#[derive(Debug, FromBytes, IntoBytes, Immutable, KnownLayout)]
pub struct HvArm64MmioInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub guest_physical_address: u64,
    /// Size of the access in bytes.
    pub access_size: u32,
    /// Data written (for writes) or to be filled in (for reads).
    pub data: [u8; 32],
    pub padding: u32,
}

impl MessagePayload for HvArm64MmioInterceptMessage {}
const_assert!(size_of::<HvArm64MmioInterceptMessage>() == 0x48);

/// Intercept message for an MSR access (RDMSR/WRMSR).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64MsrInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    /// The MSR being accessed (from ECX).
    pub msr_number: u32,
    pub reserved: u32,
    pub rdx: u64,
    pub rax: u64,
}

impl MessagePayload for HvX64MsrInterceptMessage {}

/// Intercept message delivered when a SIPI is observed.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64SipiInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub target_vp_index: u32,
    /// SIPI startup vector.
    pub vector: u32,
}

impl MessagePayload for HvX64SipiInterceptMessage {}
2844
/// Message indicating that previously blocked SINTs are now deliverable (x64).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64SynicSintDeliverableMessage {
    pub header: HvX64InterceptMessageHeader,
    /// Bitmask of SINTs that can now be delivered.
    pub deliverable_sints: u16,
    pub rsvd1: u16,
    pub rsvd2: u32,
}

impl MessagePayload for HvX64SynicSintDeliverableMessage {}

/// Message indicating that previously blocked SINTs are now deliverable
/// (ARM64).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64SynicSintDeliverableMessage {
    pub header: HvArm64InterceptMessageHeader,
    /// Bitmask of SINTs that can now be delivered.
    pub deliverable_sints: u16,
    pub rsvd1: u16,
    pub rsvd2: u32,
}

impl MessagePayload for HvArm64SynicSintDeliverableMessage {}

/// Message indicating that a previously requested interruption type is now
/// deliverable.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterruptionDeliverableMessage {
    pub header: HvX64InterceptMessageHeader,
    pub deliverable_type: HvX64PendingInterruptionType,
    pub rsvd: [u8; 3],
    pub rsvd2: u32,
}

impl MessagePayload for HvX64InterruptionDeliverableMessage {}

open_enum! {
    /// x64 pending interruption types.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvX64PendingInterruptionType: u8 {
        HV_X64_PENDING_INTERRUPT = 0,
        HV_X64_PENDING_NMI = 2,
        HV_X64_PENDING_EXCEPTION = 3,
        HV_X64_PENDING_SOFTWARE_INTERRUPT = 4,
        HV_X64_PENDING_PRIVILEGED_SOFTWARE_EXCEPTION = 5,
        HV_X64_PENDING_SOFTWARE_EXCEPTION = 6,
    }
}
2889
/// Intercept message for a hypercall issued by the guest (x64).
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64HypercallInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub rax: u64,
    pub rbx: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub r8: u64,
    pub rsi: u64,
    pub rdi: u64,
    /// XMM0-XMM5, used by the fast hypercall register convention.
    pub xmm_registers: [AlignedU128; 6],
    pub flags: HvHypercallInterceptMessageFlags,
    pub rsvd2: [u32; 3],
}

impl MessagePayload for HvX64HypercallInterceptMessage {}

/// Intercept message for a hypercall issued by the guest (ARM64).
#[repr(C)]
#[derive(Debug, Clone, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64HypercallInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    /// Immediate operand of the HVC instruction.
    pub immediate: u16,
    pub reserved: u16,
    pub flags: HvHypercallInterceptMessageFlags,
    /// X0-X17 at the time of the hypercall.
    pub x: [u64; 18],
}

impl MessagePayload for HvArm64HypercallInterceptMessage {}

/// Flags common to hypercall intercept messages.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvHypercallInterceptMessageFlags {
    pub is_isolated: bool,
    #[bits(31)]
    _reserved: u32,
}
2927
/// Intercept message for a CPUID instruction.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64CpuidInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub rax: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub rbx: u64,
    /// The result the hypervisor would have returned for this leaf.
    pub default_result_rax: u64,
    pub default_result_rcx: u64,
    pub default_result_rdx: u64,
    pub default_result_rbx: u64,
}

impl MessagePayload for HvX64CpuidInterceptMessage {}

/// Additional information about an intercepted exception.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ExceptionInfo {
    /// The `error_code` field of the message is valid.
    pub error_code_valid: bool,
    pub software_exception: bool,
    #[bits(6)]
    reserved: u8,
}
2952
/// Intercept message for an x64 exception, with the full GPR and relevant
/// segment state needed for emulation.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ExceptionInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    /// The exception vector number.
    pub vector: u16,
    pub exception_info: HvX64ExceptionInfo,
    /// Number of valid bytes in `instruction_bytes`.
    pub instruction_byte_count: u8,
    /// Only valid when `exception_info.error_code_valid()`.
    pub error_code: u32,
    /// E.g. the faulting address for a page fault.
    pub exception_parameter: u64,
    pub reserved: u64,
    pub instruction_bytes: [u8; 16],
    pub ds_segment: HvX64SegmentRegister,
    pub ss_segment: HvX64SegmentRegister,
    pub rax: u64,
    pub rcx: u64,
    pub rdx: u64,
    pub rbx: u64,
    pub rsp: u64,
    pub rbp: u64,
    pub rsi: u64,
    pub rdi: u64,
    pub r8: u64,
    pub r9: u64,
    pub r10: u64,
    pub r11: u64,
    pub r12: u64,
    pub r13: u64,
    pub r14: u64,
    pub r15: u64,
}

impl MessagePayload for HvX64ExceptionInterceptMessage {}
2985
/// Message indicating a VP's register state is invalid.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInvalidVpRegisterMessage {
    pub vp_index: u32,
    pub reserved: u32,
}

impl MessagePayload for HvInvalidVpRegisterMessage {}

/// Message delivered on an intercepted APIC EOI.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64ApicEoiMessage {
    pub vp_index: u32,
    /// The vector whose in-service bit was cleared.
    pub interrupt_vector: u32,
}

impl MessagePayload for HvX64ApicEoiMessage {}

/// Message delivered when a VP hits an unrecoverable exception
/// (e.g. triple fault).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64UnrecoverableExceptionMessage {
    pub header: HvX64InterceptMessageHeader,
}

impl MessagePayload for HvX64UnrecoverableExceptionMessage {}

/// Message delivered when a VP executes HLT.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64HaltMessage {
    pub header: HvX64InterceptMessageHeader,
}

impl MessagePayload for HvX64HaltMessage {}
3019
/// Message delivered when an ARM64 guest requests reset/power-off (e.g. via
/// PSCI).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvArm64ResetInterceptMessage {
    pub header: HvArm64InterceptMessageHeader,
    pub reset_type: HvArm64ResetType,
    pub padding: u32,
}

impl MessagePayload for HvArm64ResetInterceptMessage {}

open_enum! {
    /// The kind of reset requested.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvArm64ResetType: u32 {
        POWER_OFF = 0,
        REBOOT = 1,
    }
}
3037
/// Flags for a register-access intercept message.
// NOTE(review): this group of types derives fewer traits (no KnownLayout, no
// Debug on the message) than the surrounding message types — confirm whether
// that is intentional.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterInterceptMessageFlags {
    /// The access was performed via a memory operand.
    pub is_memory_op: bool,
    #[bits(7)]
    _rsvd: u8,
}

/// Intercept message for an access to an intercepted register.
#[repr(C)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterInterceptMessage {
    pub header: HvX64InterceptMessageHeader,
    pub flags: HvX64RegisterInterceptMessageFlags,
    pub rsvd: u8,
    pub rsvd2: u16,
    /// The register that was accessed.
    pub register_name: HvX64RegisterName,
    pub access_info: HvX64RegisterAccessInfo,
}

/// The value involved in a register-access intercept, as a raw 128-bit blob.
#[repr(transparent)]
#[derive(IntoBytes, Immutable, FromBytes)]
pub struct HvX64RegisterAccessInfo(u128);

impl HvX64RegisterAccessInfo {
    /// Wraps the source value for a register write intercept.
    pub fn new_source_value(source_value: HvRegisterValue) -> Self {
        Self(source_value.as_u128())
    }
}
3066
open_enum! {
    /// Interrupt delivery types for [`HvInterruptControl`]. The `Fixed`
    /// encoding is shared between x64 and ARM64.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvInterruptType : u32 {
        #![expect(non_upper_case_globals)]
        HvArm64InterruptTypeFixed = 0x0000,
        HvX64InterruptTypeFixed = 0x0000,
        HvX64InterruptTypeLowestPriority = 0x0001,
        HvX64InterruptTypeSmi = 0x0002,
        HvX64InterruptTypeRemoteRead = 0x0003,
        HvX64InterruptTypeNmi = 0x0004,
        HvX64InterruptTypeInit = 0x0005,
        HvX64InterruptTypeSipi = 0x0006,
        HvX64InterruptTypeExtInt = 0x0007,
        HvX64InterruptTypeLocalInt0 = 0x0008,
        HvX64InterruptTypeLocalInt1 = 0x0009,
    }
}
3084
/// Interrupt control word used by interrupt-related hypercalls. The low 32
/// bits hold an [`HvInterruptType`]; use the typed accessors on the impl
/// below rather than `interrupt_type_value` directly.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInterruptControl {
    interrupt_type_value: u32,
    /// x64 only: level-triggered delivery.
    pub x86_level_triggered: bool,
    /// x64 only: logical destination mode.
    pub x86_logical_destination_mode: bool,
    /// ARM64 only: interrupt line asserted.
    pub arm64_asserted: bool,
    #[bits(29)]
    pub unused: u32,
}
3101
3102impl HvInterruptControl {
3103 pub fn interrupt_type(&self) -> HvInterruptType {
3104 HvInterruptType(self.interrupt_type_value())
3105 }
3106
3107 pub fn set_interrupt_type(&mut self, ty: HvInterruptType) {
3108 self.set_interrupt_type_value(ty.0)
3109 }
3110
3111 pub fn with_interrupt_type(self, ty: HvInterruptType) -> Self {
3112 self.with_interrupt_type_value(ty.0)
3113 }
3114}
3115
/// The HvRegisterVsmCapabilities register: VSM features offered by the
/// hypervisor.
#[bitfield(u64)]
pub struct HvRegisterVsmCapabilities {
    pub dr6_shared: bool,
    /// Bitmask of VTLs supporting mode-based execution control.
    pub mbec_vtl_mask: u16,
    pub deny_lower_vtl_startup: bool,
    pub supervisor_shadow_stack: bool,
    pub hardware_hvpt_available: bool,
    pub software_hvpt_available: bool,
    #[bits(6)]
    pub hardware_hvpt_range_bits: u8,
    pub intercept_page_available: bool,
    pub return_action_available: bool,
    /// A VTL0 alias map of lower-VTL memory is available.
    pub vtl0_alias_map_available: bool,
    /// Not-present intercepts can be requested.
    pub intercept_not_present_available: bool,
    pub install_intercept_ex: bool,
    pub intercept_system_reset_available: bool,
    #[bits(31)]
    pub reserved: u64,
}

/// The HvRegisterVsmPartitionConfig register: per-VTL partition-wide VSM
/// configuration.
#[bitfield(u64)]
pub struct HvRegisterVsmPartitionConfig {
    pub enable_vtl_protection: bool,
    #[bits(4)]
    pub default_vtl_protection_mask: u8,
    pub zero_memory_on_reset: bool,
    pub deny_lower_vtl_startup: bool,
    pub intercept_acceptance: bool,
    pub intercept_enable_vtl_protection: bool,
    pub intercept_vp_startup: bool,
    pub intercept_cpuid_unimplemented: bool,
    pub intercept_unrecoverable_exception: bool,
    pub intercept_page: bool,
    pub intercept_restore_partition_time: bool,
    pub intercept_not_present: bool,
    pub intercept_system_reset: bool,
    #[bits(48)]
    pub reserved: u64,
}

/// The HvRegisterVsmPartitionStatus register: read-only VSM state of the
/// partition.
#[bitfield(u64)]
pub struct HvRegisterVsmPartitionStatus {
    #[bits(16)]
    pub enabled_vtl_set: u16,
    #[bits(4)]
    pub maximum_vtl: u8,
    #[bits(16)]
    pub mbec_enabled_vtl_set: u16,
    #[bits(4)]
    pub supervisor_shadow_stack_enabled_vtl_set: u8,
    #[bits(24)]
    pub reserved: u64,
}
3180
/// The HvRegisterGuestVsmPartitionConfig register.
#[bitfield(u64)]
pub struct HvRegisterGuestVsmPartitionConfig {
    #[bits(4)]
    pub maximum_vtl: u8,
    #[bits(60)]
    pub reserved: u64,
}

/// The HvRegisterVsmVpStatus register: read-only VSM state of a VP.
#[bitfield(u64)]
pub struct HvRegisterVsmVpStatus {
    /// The VTL the VP is currently running in.
    #[bits(4)]
    pub active_vtl: u8,
    pub active_mbec_enabled: bool,
    #[bits(11)]
    pub reserved_mbz0: u16,
    /// Bitmask of VTLs enabled on this VP.
    #[bits(16)]
    pub enabled_vtl_set: u16,
    #[bits(32)]
    pub reserved_mbz1: u32,
}

/// The HvRegisterVsmCodePageOffsets register: offsets of the VTL call/return
/// stubs within the hypercall code page.
#[bitfield(u64)]
pub struct HvRegisterVsmCodePageOffsets {
    #[bits(12)]
    pub call_offset: u16,
    #[bits(12)]
    pub return_offset: u16,
    #[bits(40)]
    pub reserved: u64,
}
3211
/// Saved state of one synthetic timer.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvStimerState {
    /// Nonzero if an expiration message could not be delivered and is still
    /// pending.
    pub undelivered_message_pending: u32,
    pub reserved: u32,
    /// Raw HvSynicStimerConfig value.
    pub config: u64,
    pub count: u64,
    pub adjustment: u64,
    pub undelivered_expiration_time: u64,
}

/// Saved state of all synthetic timers of a VP.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvSyntheticTimersState {
    pub timers: [HvStimerState; 4],
    pub reserved: [u64; 5],
}

/// The HvRegisterInternalActivityState register: suspend state of a VP.
#[bitfield(u64)]
pub struct HvInternalActivityRegister {
    /// The VP is suspended awaiting a startup (INIT/SIPI) sequence.
    pub startup_suspend: bool,
    pub halt_suspend: bool,
    pub idle_suspend: bool,
    #[bits(61)]
    pub reserved: u64,
}
3238
3239#[bitfield(u64)]
3240pub struct HvSynicSint {
3241 pub vector: u8,
3242 _reserved: u8,
3243 pub masked: bool,
3244 pub auto_eoi: bool,
3245 pub polling: bool,
3246 _reserved2: bool,
3247 pub proxy: bool,
3248 #[bits(43)]
3249 _reserved2: u64,
3250}
3251
/// The SCONTROL register: global SynIC enable.
#[bitfield(u64)]
pub struct HvSynicScontrol {
    pub enabled: bool,
    #[bits(63)]
    _reserved: u64,
}

/// Common format of the SIMP (message page) and SIEFP (event flags page)
/// registers: an enable bit plus a page-aligned GPA.
#[bitfield(u64)]
pub struct HvSynicSimpSiefp {
    pub enabled: bool,
    #[bits(11)]
    _reserved: u64,
    /// Guest physical page number of the page.
    #[bits(52)]
    pub base_gpn: u64,
}

/// Synthetic timer configuration register format.
#[bitfield(u64)]
pub struct HvSynicStimerConfig {
    pub enabled: bool,
    pub periodic: bool,
    pub lazy: bool,
    pub auto_enable: bool,
    /// Vector used in direct mode (bypassing the SINT message).
    pub apic_vector: u8,
    pub direct_mode: bool,
    #[bits(3)]
    pub _reserved1: u8,
    /// SINT used for expiration messages when not in direct mode.
    #[bits(4)]
    pub sint: u8,
    #[bits(44)]
    pub _reserved2: u64,
}
3284
// Values of the 3-bit `event_type` field in the x64 pending-event register
// formats below.
pub const HV_X64_PENDING_EVENT_EXCEPTION: u8 = 0;
pub const HV_X64_PENDING_EVENT_MEMORY_INTERCEPT: u8 = 1;
pub const HV_X64_PENDING_EVENT_NESTED_MEMORY_INTERCEPT: u8 = 2;
pub const HV_X64_PENDING_EVENT_VIRTUALIZATION_FAULT: u8 = 3;
pub const HV_X64_PENDING_EVENT_HYPERCALL_OUTPUT: u8 = 4;
pub const HV_X64_PENDING_EVENT_EXT_INT: u8 = 5;
pub const HV_X64_PENDING_EVENT_SHADOW_IPT: u8 = 6;
3292
/// Pending-event payload for `event_type == HV_X64_PENDING_EVENT_EXCEPTION`.
#[bitfield(u128)]
pub struct HvX64PendingExceptionEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    /// The `error_code` field is valid.
    pub deliver_error_code: bool,
    #[bits(7)]
    pub reserved1: u8,
    pub vector: u16,
    pub error_code: u32,
    /// E.g. the faulting address for a page fault.
    pub exception_parameter: u64,
}

/// Pending-event payload for
/// `event_type == HV_X64_PENDING_EVENT_VIRTUALIZATION_FAULT`.
#[bitfield(u128)]
pub struct HvX64PendingVirtualizationFaultEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    pub reserved1: u8,
    pub parameter0: u16,
    pub code: u32,
    pub parameter1: u64,
}
3324
/// Header byte shared by pending memory-intercept events.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryInterceptPendingEventHeader {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    _reserved0: u8,
}

/// Access flags for a pending memory-intercept event.
#[bitfield(u8)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryInterceptAccessFlags {
    /// The `guest_linear_address` field is valid.
    pub guest_linear_address_valid: bool,
    pub caused_by_gpa_access: bool,
    #[bits(6)]
    _reserved1: u8,
}

/// Pending-event payload for
/// `event_type == HV_X64_PENDING_EVENT_MEMORY_INTERCEPT`.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventMemoryIntercept {
    pub event_header: HvX64PendingEventMemoryInterceptPendingEventHeader,
    /// The VTL that will handle the intercept.
    pub target_vtl: u8,
    pub access_type: HvInterceptAccessType,
    pub access_flags: HvX64PendingEventMemoryInterceptAccessFlags,
    pub _reserved2: u32,
    pub guest_linear_address: u64,
    pub guest_physical_address: u64,
    pub _reserved3: u64,
}
const_assert!(size_of::<HvX64PendingEventMemoryIntercept>() == 0x20);
3368
/// Pending-event payload for
/// `event_type == HV_X64_PENDING_EVENT_HYPERCALL_OUTPUT`.
#[bitfield(u128)]
pub struct HvX64PendingHypercallOutputEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,

    /// The hypercall has been retired.
    pub retired: bool,

    #[bits(23)]
    pub reserved1: u32,

    /// Size in bytes of the pending output.
    pub output_size: u32,

    /// GPA at which the output should be written.
    pub output_gpa: u64,
}

/// Pending-event payload for `event_type == HV_X64_PENDING_EVENT_EXT_INT`.
#[bitfield(u128)]
pub struct HvX64PendingExtIntEvent {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved0: u8,
    pub vector: u8,
    #[bits(48)]
    pub reserved1: u64,
    pub reserved2: u64,
}

/// Pending-event payload for `event_type == HV_X64_PENDING_EVENT_SHADOW_IPT`.
// NOTE(review): unlike the other pending-event formats, this one gives
// `event_type` 4 bits instead of 3 — confirm against the TLFS before relying
// on the field boundaries.
#[bitfield(u128)]
pub struct HvX64PendingShadowIptEvent {
    pub event_pending: bool,
    #[bits(4)]
    pub event_type: u8,
    #[bits(59)]
    pub reserved0: u64,

    pub reserved1: u64,
}
3418
/// Generic view of the first pending-event register: the common header plus
/// the type-specific payload as opaque data.
#[bitfield(u128)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEventReg0 {
    pub event_pending: bool,
    #[bits(3)]
    pub event_type: u8,
    #[bits(4)]
    pub reserved: u8,
    /// Payload; interpret according to `event_type`.
    #[bits(120)]
    pub data: u128,
}

/// A full x64 pending event (two 128-bit registers).
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingEvent {
    pub reg_0: HvX64PendingEventReg0,
    pub reg_1: AlignedU128,
}
const_assert!(size_of::<HvX64PendingEvent>() == 0x20);
3438
3439impl From<HvX64PendingExceptionEvent> for HvX64PendingEvent {
3440 fn from(exception_event: HvX64PendingExceptionEvent) -> Self {
3441 HvX64PendingEvent {
3442 reg_0: HvX64PendingEventReg0::from(u128::from(exception_event)),
3443 reg_1: 0u128.into(),
3444 }
3445 }
3446}
3447
/// The HvX64RegisterPendingInterruption register format.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64PendingInterruptionRegister {
    pub interruption_pending: bool,
    /// One of the `HvX64PendingInterruptionType` values.
    #[bits(3)]
    pub interruption_type: u8,
    pub deliver_error_code: bool,
    #[bits(4)]
    pub instruction_length: u8,
    pub nested_event: bool,
    #[bits(6)]
    pub reserved: u8,
    pub interruption_vector: u16,
    pub error_code: u32,
}

/// The HvX64RegisterInterruptState register format.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64InterruptStateRegister {
    /// Interrupts are blocked by an interrupt shadow (e.g. after STI/MOV SS).
    pub interrupt_shadow: bool,
    pub nmi_masked: bool,
    #[bits(62)]
    pub reserved: u64,
}

/// Hints for instruction emulation.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvInstructionEmulatorHintsRegister {
    /// A secure VTL is enabled for the partition.
    pub partition_secure_vtl_enabled: bool,
    /// MBEC user-execute control is in effect.
    pub mbec_user_execute_control: bool,
    #[bits(62)]
    pub _padding: u64,
}
3484
open_enum! {
    /// The type of an ARM64 pending event.
    #[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
    pub enum HvAarch64PendingEventType: u8 {
        EXCEPTION = 0,
        SYNTHETIC_EXCEPTION = 1,
        HYPERCALL_OUTPUT = 2,
    }
}
3493
3494impl HvAarch64PendingEventType {
3496 const fn from_bits(val: u8) -> Self {
3497 HvAarch64PendingEventType(val)
3498 }
3499
3500 const fn into_bits(self) -> u8 {
3501 self.0
3502 }
3503}
3504
3505#[bitfield[u8]]
3506#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
3507pub struct HvAarch64PendingEventHeader {
3508 #[bits(1)]
3509 pub event_pending: bool,
3510 #[bits(3)]
3511 pub event_type: HvAarch64PendingEventType,
3512 #[bits(4)]
3513 pub reserved: u8,
3514}
3515
/// ARM64 pending-event payload for an (synthetic) exception.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingExceptionEvent {
    pub header: HvAarch64PendingEventHeader,
    pub _padding: [u8; 7],
    /// ESR-style exception syndrome.
    pub syndrome: u64,
    pub fault_address: u64,
}
3524
3525#[bitfield[u8]]
3526#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
3527pub struct HvAarch64PendingHypercallOutputEventFlags {
3528 #[bits(1)]
3529 pub retired: u8,
3530 #[bits(7)]
3531 pub reserved: u8,
3532}
3533
/// ARM64 pending-event payload for pending hypercall output.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingHypercallOutputEvent {
    pub header: HvAarch64PendingEventHeader,
    pub flags: HvAarch64PendingHypercallOutputEventFlags,
    pub reserved: u16,
    /// Size in bytes of the pending output.
    pub output_size: u32,
    /// GPA at which the output should be written.
    pub output_gpa: u64,
}

/// Generic view of an ARM64 pending event: header plus opaque payload.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64PendingEvent {
    pub header: HvAarch64PendingEventHeader,
    /// Payload; interpret according to `header.event_type()`.
    pub event_data: [u8; 15],
    pub _padding: [u64; 2],
}
3551
/// GPA protection flags used by HvModifyVtlProtectionMask and related calls.
#[bitfield(u32)]
#[derive(PartialEq, Eq, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMapGpaFlags {
    pub readable: bool,
    pub writable: bool,
    pub kernel_executable: bool,
    pub user_executable: bool,
    pub supervisor_shadow_stack: bool,
    pub paging_writability: bool,
    pub verify_paging_writability: bool,
    #[bits(8)]
    _padding0: u32,
    pub adjustable: bool,
    #[bits(16)]
    _padding1: u32,
}

/// No access permitted.
pub const HV_MAP_GPA_PERMISSIONS_NONE: HvMapGpaFlags = HvMapGpaFlags::new();
/// Full read/write/execute access.
pub const HV_MAP_GPA_PERMISSIONS_ALL: HvMapGpaFlags = HvMapGpaFlags::new()
    .with_readable(true)
    .with_writable(true)
    .with_kernel_executable(true)
    .with_user_executable(true);
3576
/// The monitored-notification ("monitor") page layout used for latency-
/// tolerant signaling.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorPage {
    pub trigger_state: HvMonitorTriggerState,
    pub reserved1: u32,
    /// Pending/armed trigger bits, one group of 32 flags per entry.
    pub trigger_group: [HvMonitorTriggerGroup; 4],
    pub reserved2: [u64; 3],
    pub next_check_time: [[u32; 32]; 4],
    pub latency: [[u16; 32]; 4],
    pub reserved3: [u64; 32],
    pub parameter: [[HvMonitorParameter; 32]; 4],
    pub reserved4: [u8; 1984],
}

/// The header-only prefix of the monitor page (trigger state and groups).
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorPageSmall {
    pub trigger_state: HvMonitorTriggerState,
    pub reserved1: u32,
    pub trigger_group: [HvMonitorTriggerGroup; 4],
}

/// One group of monitor trigger bits.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorTriggerGroup {
    pub pending: u32,
    pub armed: u32,
}

/// The connection/flag a monitor trigger bit maps to.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorParameter {
    pub connection_id: u32,
    pub flag_number: u16,
    pub reserved: u16,
}

/// Global monitor trigger state: which groups are enabled.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvMonitorTriggerState {
    #[bits(4)]
    pub group_enable: u32,
    #[bits(28)]
    pub reserved: u32,
}
3622
/// ACPI PM timer information.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvPmTimerInfo {
    /// I/O port the PM timer is accessible at.
    #[bits(16)]
    pub port: u16,
    /// Timer is 24-bit rather than 32-bit.
    #[bits(1)]
    pub width_24: bool,
    #[bits(1)]
    pub enabled: bool,
    #[bits(14)]
    pub reserved1: u32,
    #[bits(32)]
    pub reserved2: u32,
}

/// The HvX64RegisterSevControl register: SEV-SNP VMSA configuration.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterSevControl {
    pub enable_encrypted_state: bool,
    #[bits(11)]
    _rsvd1: u64,
    /// Page number of the VMSA.
    #[bits(52)]
    pub vmsa_gpa_page_number: u64,
}

/// The HvRegisterReferenceTsc register: enable bit plus page number of the
/// reference TSC page.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvRegisterReferenceTsc {
    pub enable: bool,
    #[bits(11)]
    pub reserved_p: u64,
    #[bits(52)]
    pub gpn: u64,
}
3657
/// The reference TSC page, used for scaled reference-time computation.
#[repr(C)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvReferenceTscPage {
    /// Sequence number; readers retry while it changes, and
    /// [`HV_REFERENCE_TSC_SEQUENCE_INVALID`] means the page is not usable.
    pub tsc_sequence: u32,
    pub reserved1: u32,
    pub tsc_scale: u64,
    pub tsc_offset: i64,
    pub timeline_bias: u64,
    pub tsc_multiplier: u64,
    pub reserved2: [u64; 507],
}

/// Sequence value indicating the reference TSC page contents are invalid.
pub const HV_REFERENCE_TSC_SEQUENCE_INVALID: u32 = 0;
3671
/// Flags accompanying a VMGEXIT intercept message.
#[bitfield(u64)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageFlags {
    /// The GHCB page contents in the message are valid.
    pub ghcb_page_valid: bool,
    /// An error was detected with the GHCB request (exact conditions are
    /// defined by the TLFS/GHCB spec — confirm there).
    pub ghcb_request_error: bool,
    #[bits(62)]
    _reserved: u64,
}
3680
/// The standard-usage layout of the GHCB page contents captured in a VMGEXIT
/// intercept message. Field meanings follow the AMD GHCB specification's
/// software exit information fields (NOTE(review): confirm against that spec).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageGhcbPageStandard {
    /// GHCB protocol version negotiated by the guest.
    pub ghcb_protocol_version: u16,
    _reserved: [u16; 3],
    /// Software exit code (SW_EXITCODE).
    pub sw_exit_code: u64,
    /// Software exit information 1 (SW_EXITINFO1).
    pub sw_exit_info1: u64,
    /// Software exit information 2 (SW_EXITINFO2).
    pub sw_exit_info2: u64,
    /// Scratch area pointer (SW_SCRATCH).
    pub sw_scratch: u64,
}
3691
/// GHCB page contents captured in a VMGEXIT intercept message.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessageGhcbPage {
    /// Discriminator for the format of the rest of the page; the standard
    /// format is carried in `standard`.
    pub ghcb_usage: u32,
    _reserved: u32,
    /// Contents when `ghcb_usage` indicates the standard format.
    pub standard: HvX64VmgexitInterceptMessageGhcbPageStandard,
}
3699
/// Intercept message delivered when a guest executes VMGEXIT (the SEV-ES/SNP
/// guest-to-hypervisor exit mechanism, which communicates via the GHCB).
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64VmgexitInterceptMessage {
    /// Common intercept message header.
    pub header: HvX64InterceptMessageHeader,
    /// Value of the GHCB MSR at the time of the exit.
    pub ghcb_msr: u64,
    /// Validity/error flags for the captured GHCB state.
    pub flags: HvX64VmgexitInterceptMessageFlags,
    /// Snapshot of the GHCB page contents.
    pub ghcb_page: HvX64VmgexitInterceptMessageGhcbPage,
}
3708
3709impl MessagePayload for HvX64VmgexitInterceptMessage {}
3710
/// Value of the VP assist page register: enables the VP assist page and
/// specifies where it is mapped.
#[bitfield(u64)]
pub struct HvRegisterVpAssistPage {
    /// Whether the VP assist page is mapped.
    pub enabled: bool,
    #[bits(11)]
    _reserved: u64,
    /// Guest physical page number at which the VP assist page is mapped.
    #[bits(52)]
    pub gpa_page_number: u64,
}
3719
/// Flags recording which register groups in [`HvX64RegisterPage`] have been
/// modified.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterPageDirtyFlags {
    /// The general purpose registers (`gp_registers`) were modified.
    pub general_purpose: bool,
    /// `rip` was modified.
    pub instruction_pointer: bool,
    /// The XMM registers were modified.
    pub xmm: bool,
    /// The segment registers were modified.
    pub segments: bool,
    /// `rflags` was modified.
    pub flags: bool,
    #[bits(27)]
    reserved: u32,
}
3731
/// The x64 register page: a page-sized overlay exposing commonly accessed
/// virtual processor register state, with dirty tracking per register group.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvX64RegisterPage {
    /// Layout version of this page.
    pub version: u16,
    /// Nonzero when the contents of the page are valid.
    pub is_valid: u8,
    /// The VTL whose register state this page reflects.
    pub vtl: u8,
    /// Which register groups below have been modified.
    pub dirty: HvX64RegisterPageDirtyFlags,
    /// General purpose registers. NOTE(review): the ABI-defined ordering of
    /// the 16 registers is not visible here — confirm against the TLFS.
    pub gp_registers: [u64; 16],
    pub rip: u64,
    pub rflags: u64,
    pub reserved: u64,
    /// XMM registers (first six).
    pub xmm: [u128; 6],
    /// Segment registers (six entries, 128 bits each).
    pub segment: [u128; 6],
    // Control and debug registers.
    pub cr0: u64,
    pub cr3: u64,
    pub cr4: u64,
    pub cr8: u64,
    pub efer: u64,
    pub dr7: u64,
    /// Pending interruption/exception state.
    pub pending_interruption: HvX64PendingInterruptionRegister,
    /// Interruptibility state.
    pub interrupt_state: HvX64InterruptStateRegister,
    /// Hints for instruction emulation.
    pub instruction_emulation_hints: HvInstructionEmulatorHintsRegister,
    /// Pads the structure out to exactly one 4KiB page.
    pub reserved_end: [u8; 3672],
}

// The register page overlays exactly one guest page.
const _: () = assert!(size_of::<HvX64RegisterPage>() == HV_PAGE_SIZE_USIZE);
3759
/// Flags recording which register groups in [`HvAarch64RegisterPage`] have
/// been modified.
#[bitfield(u32)]
#[derive(IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64RegisterPageDirtyFlags {
    // Bit 0 is unused (keeps bit positions aligned with the ABI definition).
    _unused: bool,
    /// `pc` was modified.
    pub instruction_pointer: bool,
    /// `cpsr` was modified.
    pub processor_state: bool,
    /// The control registers (`sctlr_el1`, `tcr_el1`) were modified.
    pub control_registers: bool,
    #[bits(28)]
    reserved: u32,
}
3770
/// The AArch64 register page: a page-sized overlay exposing commonly accessed
/// virtual processor register state, with dirty tracking per register group.
#[repr(C)]
#[derive(Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct HvAarch64RegisterPage {
    /// Layout version of this page.
    pub version: u16,
    /// Nonzero when the contents of the page are valid.
    pub is_valid: u8,
    /// The VTL whose register state this page reflects.
    pub vtl: u8,
    /// Which register groups below have been modified.
    pub dirty: HvAarch64RegisterPageDirtyFlags,
    pub _rsvd: [u64; 33],
    /// Program counter.
    pub pc: u64,
    /// Current program status register.
    pub cpsr: u64,
    /// System control register (EL1).
    pub sctlr_el1: u64,
    /// Translation control register (EL1).
    pub tcr_el1: u64,
    /// Pads the structure out to exactly one 4KiB page.
    pub reserved_end: [u8; 3792],
}

// The register page overlays exactly one guest page.
const _: () = assert!(size_of::<HvAarch64RegisterPage>() == HV_PAGE_SIZE_USIZE);
3792
/// VSM register controlling TLB-lock wait behavior. NOTE(review): the name
/// suggests "wait for TLB lock" on write-protect handling — confirm exact
/// semantics against the TLFS.
#[bitfield(u64)]
pub struct HvRegisterVsmWpWaitForTlbLock {
    /// Whether to wait for the TLB lock.
    pub wait: bool,
    #[bits(63)]
    _reserved: u64,
}
3799
/// Per-VP secure VTL configuration, controlling protections a higher VTL
/// applies to a lower one.
#[bitfield(u64)]
pub struct HvRegisterVsmVpSecureVtlConfig {
    /// Mode-based execution control (MBEC) is enabled.
    pub mbec_enabled: bool,
    /// The TLB is locked.
    pub tlb_locked: bool,
    /// Supervisor shadow stacks are enabled.
    pub supervisor_shadow_stack_enabled: bool,
    /// Hardware HVPT is enabled.
    pub hardware_hvpt_enabled: bool,
    #[bits(60)]
    _reserved: u64,
}
3809
/// Controls which control-register writes and model-specific-register
/// accesses are intercepted (presumably by a higher VTL, per the surrounding
/// VSM registers — confirm against the TLFS).
#[bitfield(u64)]
pub struct HvRegisterCrInterceptControl {
    // Control register and XCR0 write intercepts.
    pub cr0_write: bool,
    pub cr4_write: bool,
    pub xcr0_write: bool,
    // MSR read/write intercepts.
    pub ia32_misc_enable_read: bool,
    pub ia32_misc_enable_write: bool,
    pub msr_lstar_read: bool,
    pub msr_lstar_write: bool,
    pub msr_star_read: bool,
    pub msr_star_write: bool,
    pub msr_cstar_read: bool,
    pub msr_cstar_write: bool,
    pub apic_base_msr_read: bool,
    pub apic_base_msr_write: bool,
    pub msr_efer_read: bool,
    pub msr_efer_write: bool,
    // Descriptor-table and task register write intercepts.
    pub gdtr_write: bool,
    pub idtr_write: bool,
    pub ldtr_write: bool,
    pub tr_write: bool,
    // Additional MSR write intercepts (syscall/sysenter, CET, etc.).
    pub msr_sysenter_cs_write: bool,
    pub msr_sysenter_eip_write: bool,
    pub msr_sysenter_esp_write: bool,
    pub msr_sfmask_write: bool,
    pub msr_tsc_aux_write: bool,
    pub msr_sgx_launch_control_write: bool,
    pub msr_xss_write: bool,
    pub msr_scet_write: bool,
    pub msr_pls_ssp_write: bool,
    pub msr_interrupt_ssp_table_addr_write: bool,
    #[bits(35)]
    _rsvd_z: u64,
}