// openhcl_boot/arch/x86_64/snp.rs
use super::address_space::LocalMap;
use core::arch::asm;
use memory_range::MemoryRange;
use minimal_rt::arch::msr::read_msr;
use minimal_rt::arch::msr::write_msr;
use x86defs::X86X_AMD_MSR_GHCB;
use x86defs::snp::GhcbInfo;
/// Marker type grouping the GHCB (Guest-Host Communication Block) MSR
/// protocol operations used for SEV-SNP page state changes.
pub struct Ghcb;
/// Non-fatal outcome of a page-acceptance (PVALIDATE) attempt.
#[derive(Debug)]
pub enum AcceptGpaStatus {
    /// The requested validation change was applied.
    Success,
    /// The instruction reported a size mismatch; the caller should retry
    /// the operation at 4K granularity.
    Retry,
}
/// Fatal failure while changing page acceptance.
// expect(dead_code): fields appear to be consumed only through the derived
// Debug formatting (e.g. in panic output) — nothing reads them directly here.
#[expect(dead_code)] #[derive(Debug)]
pub enum AcceptGpaError {
    /// PVALIDATE reported an unexpected error code / carry flag combination.
    MemorySecurityViolation {
        /// Error code returned by the PVALIDATE instruction in eax.
        error_code: u32,
        /// 1 if the instruction set the carry flag, 0 otherwise.
        carry_flag: u32,
        /// Guest page number the operation was applied to.
        page_number: u64,
        /// Whether a 2MB large-page operation was requested.
        large_page: bool,
        /// Whether the page was being validated (true) or unvalidated (false).
        validate: bool,
    },
    /// A failure that does not map to a more specific cause.
    Unknown,
}
impl Ghcb {
    /// Exits to the hypervisor via VMGEXIT.
    ///
    /// VMGEXIT is encoded as VMMCALL with a REP prefix; on SEV-SNP hardware
    /// this transfers control to the host so it can service the request
    /// previously written to the GHCB MSR.
    fn sev_vmgexit() {
        // SAFETY: VMGEXIT only transfers control to the hypervisor as part
        // of the defined guest/host exit protocol; it does not access guest
        // memory. NOTE(review): no clobbers or options are declared on this
        // asm! block — presumably fine for this instruction, confirm.
        unsafe {
            asm! {r#"
rep vmmcall
"#
            }
        }
    }

    /// Changes host visibility (shared vs. private) for every 4K page in
    /// `range` using the GHCB MSR protocol's page-state-change request.
    ///
    /// Panics if the hypervisor response for any page is not exactly
    /// `PAGE_STATE_UPDATED`.
    pub fn change_page_visibility(range: MemoryRange, host_visible: bool) {
        // Save the current GHCB MSR value so it can be restored after the
        // protocol exchange below clobbers it.
        // SAFETY: reading the GHCB MSR has no side effects.
        let previous_value = unsafe { read_msr(X86X_AMD_MSR_GHCB) };
        for page_number in range.start_4k_gpn()..range.end_4k_gpn() {
            // Request the shared (host-visible) or private page state.
            let extra_data = if host_visible {
                x86defs::snp::GHCB_DATA_PAGE_STATE_SHARED
            } else {
                x86defs::snp::GHCB_DATA_PAGE_STATE_PRIVATE
            };

            // Build the MSR protocol request: page-state value shifted to
            // bit 52 and up, the page's GPA in bits 51:12, and the
            // page-state-change info code in the low bits.
            let val = (extra_data << 52) | (page_number << 12) | GhcbInfo::PAGE_STATE_CHANGE.0;
            // SAFETY: writing the request to the GHCB MSR, exiting to the
            // hypervisor, and reading back the response is the defined MSR
            // protocol sequence; it does not affect guest memory safety.
            let val = unsafe {
                write_msr(X86X_AMD_MSR_GHCB, val);
                Self::sev_vmgexit();
                read_msr(X86X_AMD_MSR_GHCB)
            };

            // The response must be exactly PAGE_STATE_UPDATED; any error
            // bits set by the host make this comparison fail.
            assert!(
                val == GhcbInfo::PAGE_STATE_UPDATED.0,
                "GhcbInfo::PAGE_STATE_UPDATED returned msr value {val}"
            );
        }
        // SAFETY: restoring the GHCB MSR value saved above.
        unsafe { write_msr(X86X_AMD_MSR_GHCB, previous_value) };
    }
}
/// Executes the PVALIDATE instruction to change the validated state of the
/// page mapped at `va`.
///
/// * `page_number` - guest page number, used only for error reporting.
/// * `va` - virtual address of the mapping backing the page; must be
///   2MB-aligned when `large_page` is set, 4K-aligned otherwise (asserted).
/// * `large_page` - operate on a 2MB large page instead of a 4K page.
/// * `validate` - set (true) or clear (false) the page's validated state.
///
/// Returns `Ok(AcceptGpaStatus::Retry)` on a size-mismatch failure (caller
/// should retry at 4K granularity) and an error for any other failure
/// reported by the instruction.
fn pvalidate(
    page_number: u64,
    va: u64,
    large_page: bool,
    validate: bool,
) -> Result<AcceptGpaStatus, AcceptGpaError> {
    if large_page {
        assert!(va % x86defs::X64_LARGE_PAGE_SIZE == 0);
    } else {
        assert!(va % hvdef::HV_PAGE_SIZE == 0)
    }
    // PVALIDATE operands: ecx = page size (bool cast: 0 = 4K, 1 = 2MB),
    // edx = validate flag, rax = virtual address of the page.
    let validate_page = validate as u32;
    let page_size = large_page as u32;
    let mut error_code: u32;
    // Set to 1 below when PVALIDATE sets the carry flag.
    // NOTE(review): per the instruction's contract CF appears to indicate
    // "validated state not changed" — confirm against the AMD manual.
    let mut carry_flag: u32 = 0;

    // SAFETY: PVALIDATE updates the RMP validated state for the page that
    // `va` translates to; it does not modify guest memory. Results are
    // reported through eax and the carry flag, captured below.
    unsafe {
        asm!(r#"
pvalidate
jnc 2f
inc {carry_flag:e}
2:
"#,
        in("rax") va,
        in("ecx") page_size,
        in("edx") validate_page,
        lateout("eax") error_code,
        carry_flag = inout(reg) carry_flag);
    }

    // Return codes of the PVALIDATE instruction (from eax).
    const SEV_SUCCESS: u32 = 0;
    const SEV_FAIL_SIZEMISMATCH: u32 = 6;

    match (error_code, carry_flag) {
        (SEV_SUCCESS, 0) => Ok(AcceptGpaStatus::Success),
        (SEV_FAIL_SIZEMISMATCH, _) => Ok(AcceptGpaStatus::Retry),
        _ => Err(AcceptGpaError::MemorySecurityViolation {
            error_code,
            carry_flag,
            page_number,
            large_page,
            validate,
        }),
    }
}
/// Sets or clears page acceptance (PVALIDATE) for every 4K page in `range`.
///
/// Pages are touched through a temporary mapping obtained from `local_map`.
/// Whenever the current position is 2MB-aligned and at least 2MB of pages
/// remain, a large-page PVALIDATE is attempted first; on a size mismatch
/// the code falls back to validating the single 4K page.
pub fn set_page_acceptance(
    local_map: &mut LocalMap<'_>,
    range: MemoryRange,
    validate: bool,
) -> Result<(), AcceptGpaError> {
    let pages_per_large_page = x86defs::X64_LARGE_PAGE_SIZE / hvdef::HV_PAGE_SIZE;
    let mut remaining = range.page_count_4k();
    let mut gpn = range.start_4k_gpn();

    while remaining != 0 {
        // Map the current page so PVALIDATE has a VA to operate on.
        // NOTE(review): only one 4K GPN is requested even on the large-page
        // path — presumably the local map provides a suitably aligned
        // (large-page-backed) mapping; confirm against LocalMap::map_pages.
        let mapping = local_map.map_pages(
            MemoryRange::from_4k_gpn_range(gpn..gpn + 1),
            true,
        );
        let va = mapping.data.as_ptr() as u64;

        // Prefer a 2MB PVALIDATE when aligned and enough pages remain.
        if gpn % pages_per_large_page == 0 && remaining >= pages_per_large_page {
            if let AcceptGpaStatus::Success = pvalidate(gpn, va, true, validate)? {
                remaining -= pages_per_large_page;
                gpn += pages_per_large_page;
                continue;
            }
            // Size mismatch: fall through to the 4K path below.
        }

        match pvalidate(gpn, va, false, validate)? {
            AcceptGpaStatus::Success => {
                remaining -= 1;
                gpn += 1;
            }
            // A size mismatch cannot legitimately occur for a 4K request;
            // surface it as an unknown failure.
            AcceptGpaStatus::Retry => return Err(AcceptGpaError::Unknown),
        }
    }

    Ok(())
}