pub struct HvX64PendingEventMemoryInterceptAccessFlags(/* private fields */);
Part of HvX64PendingEventMemoryIntercept
Implementations
impl HvX64PendingEventMemoryInterceptAccessFlags
pub const fn guest_linear_address_valid(&self) -> bool
Indicates if the guest linear address is valid.
Bits: 0..1
pub const fn with_guest_linear_address_valid_checked(
    self,
    value: bool,
) -> Result<Self, ()>
Indicates if the guest linear address is valid.
Bits: 0..1
pub const fn with_guest_linear_address_valid(self, value: bool) -> Self
Indicates if the guest linear address is valid.
Bits: 0..1
pub const fn set_guest_linear_address_valid(&mut self, value: bool)
Indicates if the guest linear address is valid.
Bits: 0..1
pub const fn set_guest_linear_address_valid_checked(
    &mut self,
    value: bool,
) -> Result<(), ()>
Indicates if the guest linear address is valid.
Bits: 0..1
pub const fn caused_by_gpa_access(&self) -> bool
Indicates that the memory intercept was caused by an access to a guest physical address (instead of a page table as part of a page table walk).
Bits: 1..2
pub const fn with_caused_by_gpa_access_checked(
    self,
    value: bool,
) -> Result<Self, ()>
Indicates that the memory intercept was caused by an access to a guest physical address (instead of a page table as part of a page table walk).
Bits: 1..2
pub const fn with_caused_by_gpa_access(self, value: bool) -> Self
Indicates that the memory intercept was caused by an access to a guest physical address (instead of a page table as part of a page table walk).
Bits: 1..2
pub const fn set_caused_by_gpa_access(&mut self, value: bool)
Indicates that the memory intercept was caused by an access to a guest physical address (instead of a page table as part of a page table walk).
Bits: 1..2
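Taken together, these accessors support both builder-style construction and in-place mutation. A minimal sketch (not from this page: it assumes the type is in scope and uses the `new_zeroed` constructor from the `FromZeros` impl documented below):

```rust
use zerocopy::FromZeros; // assumed source of new_zeroed (see the FromZeros impl below)

// Builder style: start from all-zero flags, then set bits with the with_* methods.
let mut flags = HvX64PendingEventMemoryInterceptAccessFlags::new_zeroed()
    .with_guest_linear_address_valid(true);

// Setter style: mutate in place.
flags.set_caused_by_gpa_access(true);

assert!(flags.guest_linear_address_valid());
assert!(flags.caused_by_gpa_access());

// The *_checked variants return Err(()) when the value does not fit the
// field; a one-bit bool field always fits, so this unwrap cannot fail.
let flags = flags.with_guest_linear_address_valid_checked(false).unwrap();
assert!(!flags.guest_linear_address_valid());
```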
Trait Implementations
impl Clone for HvX64PendingEventMemoryInterceptAccessFlags
fn clone(&self) -> HvX64PendingEventMemoryInterceptAccessFlags
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.
impl From<HvX64PendingEventMemoryInterceptAccessFlags> for u8
fn from(v: HvX64PendingEventMemoryInterceptAccessFlags) -> u8
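Because this impl exists, `Into<u8>` is available automatically. A sketch of extracting the raw byte (bit positions follow the `Bits:` ranges above; `new_zeroed` is assumed from the `FromZeros` impl below):

```rust
use zerocopy::FromZeros; // assumed source of new_zeroed

let flags = HvX64PendingEventMemoryInterceptAccessFlags::new_zeroed()
    .with_guest_linear_address_valid(true) // bit 0
    .with_caused_by_gpa_access(true);      // bit 1
let raw: u8 = flags.into(); // From<...> for u8 gives Into<u8> for free
assert_eq!(raw, 0b0000_0011);
```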
impl FromBytes for HvX64PendingEventMemoryInterceptAccessFlags
where
    u8: FromBytes,
fn ref_from_bytes(
    source: &[u8],
) -> Result<&Self, ConvertError<AlignmentError<&[u8], Self>, SizeError<&[u8], Self>, Infallible>>
where
    Self: KnownLayout + Immutable,
fn ref_from_prefix(
    source: &[u8],
) -> Result<(&Self, &[u8]), ConvertError<AlignmentError<&[u8], Self>, SizeError<&[u8], Self>, Infallible>>
where
    Self: KnownLayout + Immutable,
fn ref_from_suffix(
    source: &[u8],
) -> Result<(&[u8], &Self), ConvertError<AlignmentError<&[u8], Self>, SizeError<&[u8], Self>, Infallible>>
where
    Self: Immutable + KnownLayout,
Interprets the suffix of the given source as a &Self.
fn mut_from_bytes(
    source: &mut [u8],
) -> Result<&mut Self, ConvertError<AlignmentError<&mut [u8], Self>, SizeError<&mut [u8], Self>, Infallible>>
where
    Self: IntoBytes + KnownLayout,
fn mut_from_prefix(
    source: &mut [u8],
) -> Result<(&mut Self, &mut [u8]), ConvertError<AlignmentError<&mut [u8], Self>, SizeError<&mut [u8], Self>, Infallible>>
where
    Self: IntoBytes + KnownLayout,
fn mut_from_suffix(
    source: &mut [u8],
) -> Result<(&mut [u8], &mut Self), ConvertError<AlignmentError<&mut [u8], Self>, SizeError<&mut [u8], Self>, Infallible>>
where
    Self: IntoBytes + KnownLayout,
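A sketch of borrowing the flags directly out of a raw byte buffer with `ref_from_bytes`. This assumes the `Immutable` bound in the signature holds for this type (it is not shown on this page), and that the one-byte `u8` backing gives the type size 1 and alignment 1:

```rust
use zerocopy::FromBytes; // assumed source of ref_from_bytes

let buf = [0b0000_0001u8]; // guest_linear_address_valid set, caused_by_gpa_access clear
let flags = HvX64PendingEventMemoryInterceptAccessFlags::ref_from_bytes(&buf[..])
    .expect("a single byte matches the size and alignment of this u8-backed type");
assert!(flags.guest_linear_address_valid());
assert!(!flags.caused_by_gpa_access());
```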
impl FromZeros for HvX64PendingEventMemoryInterceptAccessFlags
where
    u8: FromZeros,
fn new_zeroed() -> Self
where
    Self: Sized,
Creates an instance of Self from zeroed bytes.
fn new_box_zeroed() -> Result<Box<Self>, AllocError>
where
    Self: Sized,
Creates a Box<Self> from zeroed bytes.
fn new_vec_zeroed(len: usize) -> Result<Vec<Self>, AllocError>
where
    Self: Sized,
Creates a Vec<Self> from zeroed bytes.
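All three constructors produce values with every flag cleared; a brief sketch:

```rust
use zerocopy::FromZeros; // assumed source of these constructors

let flags = HvX64PendingEventMemoryInterceptAccessFlags::new_zeroed();
assert!(!flags.guest_linear_address_valid());
assert!(!flags.caused_by_gpa_access());
assert_eq!(u8::from(flags), 0); // raw representation is all zeros
```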
impl IntoBytes for HvX64PendingEventMemoryInterceptAccessFlags
where
    u8: IntoBytes,
fn as_mut_bytes(&mut self) -> &mut [u8]
where
    Self: FromBytes,
fn write_to(&self, dst: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>>
where
    Self: Immutable,
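A sketch of serializing the flags into a caller-provided buffer with `write_to`, which returns a `SizeError` if `dst` is not exactly the size of the type (one byte here, given the `u8` backing; the `Immutable` bound is assumed to hold):

```rust
use zerocopy::{FromZeros, IntoBytes}; // assumed sources of new_zeroed and write_to

let flags = HvX64PendingEventMemoryInterceptAccessFlags::new_zeroed()
    .with_caused_by_gpa_access(true); // bit 1
let mut dst = [0u8; 1];
flags.write_to(&mut dst[..]).expect("dst is exactly one byte");
assert_eq!(dst[0], 0b0000_0010);
```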
impl KnownLayout for HvX64PendingEventMemoryInterceptAccessFlags
where
    Self: Sized,
type PointerMetadata = ()
The type of metadata stored in a pointer to Self.