1#![expect(unsafe_code)]
6
7use crate::MshvVtlWithPolicy;
8use crate::RegistrationError;
9use crate::registrar::MemoryRegistrar;
10use guestmem::GuestMemoryAccess;
11use guestmem::GuestMemoryBackingError;
12use guestmem::PAGE_SIZE;
13use hcl::GuestVtl;
14use hcl::ioctl::Mshv;
15use hcl::ioctl::MshvVtlLow;
16use hvdef::HvMapGpaFlags;
17use inspect::Inspect;
18use memory_range::MemoryRange;
19use parking_lot::Mutex;
20use sparse_mmap::SparseMapping;
21use std::ptr::NonNull;
22use std::sync::Arc;
23use thiserror::Error;
24use virt_mshv_vtl::ProtectIsolatedMemory;
25use vm_topology::memory::MemoryLayout;
26
/// A partition-wide view over guest memory: the RAM layout paired with a
/// shared bitmap tracking which pages are currently valid for this view.
pub struct GuestPartitionMemoryView<'a> {
    /// The partition's RAM layout, used to size and populate bitmaps.
    memory_layout: &'a MemoryLayout,
    /// Shared valid-page bitmap, handed to each mapping built from this view.
    valid_memory: Arc<GuestValidMemory>,
}
31
32impl<'a> GuestPartitionMemoryView<'a> {
33 pub fn new(
39 memory_layout: &'a MemoryLayout,
40 memory_type: GuestValidMemoryType,
41 valid_bitmap_state: bool,
42 ) -> Result<Self, MappingError> {
43 let valid_memory =
44 GuestValidMemory::new(memory_layout, memory_type, valid_bitmap_state).map(Arc::new)?;
45 Ok(Self {
46 memory_layout,
47 valid_memory,
48 })
49 }
50
51 pub fn partition_valid_memory(&self) -> Arc<GuestValidMemory> {
53 self.valid_memory.clone()
54 }
55
56 fn build_guest_memory_mapping(
59 &self,
60 mshv_vtl_low: &MshvVtlLow,
61 memory_mapping_builder: &mut GuestMemoryMappingBuilder,
62 ) -> Result<GuestMemoryMapping, MappingError> {
63 memory_mapping_builder
64 .use_partition_valid_memory(Some(self.valid_memory.clone()))
65 .build(mshv_vtl_low, self.memory_layout)
66 }
67}
68
/// The kind of access a [`GuestMemoryView`] models. Each variant selects a
/// different read-side permission bitmap in `access_bitmap`.
#[derive(Debug, Inspect)]
pub enum GuestMemoryViewReadType {
    /// Ordinary data read access.
    Read,
    /// Kernel-mode instruction fetch.
    KernelExecute,
    /// User-mode instruction fetch.
    UserExecute,
}
75
/// A VTL-specific view over a [`GuestMemoryMapping`], checking accesses with
/// the semantics of `view_type`.
#[derive(Inspect)]
pub struct GuestMemoryView {
    /// Hook used to lock/unlock GPNs; `None` when the partition does not
    /// protect isolated memory.
    #[inspect(skip)]
    protector: Option<Arc<dyn ProtectIsolatedMemory>>,
    /// The underlying mapping shared by all views.
    pub memory_mapping: Arc<GuestMemoryMapping>,
    /// Which permission bitmap accesses through this view are checked
    /// against.
    pub view_type: GuestMemoryViewReadType,
    /// The VTL on whose behalf this view accesses memory.
    vtl: GuestVtl,
}
84
85impl GuestMemoryView {
86 pub fn new(
87 protector: Option<Arc<dyn ProtectIsolatedMemory>>,
88 memory_mapping: Arc<GuestMemoryMapping>,
89 view_type: GuestMemoryViewReadType,
90 vtl: GuestVtl,
91 ) -> Self {
92 Self {
93 protector,
94 memory_mapping,
95 view_type,
96 vtl,
97 }
98 }
99}
100
/// Error returned from `page_fault` when the faulting page has no backing
/// mapping at all (as opposed to a bitmap/permission failure).
#[derive(Error, Debug)]
#[error("the specified page is not mapped")]
struct NotMapped;
104
/// Reasons an access can fail a bitmap check in `page_fault`.
#[derive(Error, Debug)]
enum BitmapFailure {
    /// A private page was touched via the shared mapping, or vice versa.
    #[error("the specified page was accessed using the wrong visibility mapping")]
    IncorrectHostVisibilityAccess,
    /// The access was denied by VTL 1's per-page permission bitmaps.
    #[error("the specified page access violates VTL 1 protections")]
    Vtl1ProtectionsViolation,
}
112
// SAFETY: the mapping pointer/length and the bitmap pointers returned below
// stay valid and stable for the lifetime of this view — they are owned by the
// Arc'd `GuestMemoryMapping` and are never remapped — as the
// `GuestMemoryAccess` contract requires.
unsafe impl GuestMemoryAccess for GuestMemoryView {
    fn mapping(&self) -> Option<NonNull<u8>> {
        NonNull::new(self.memory_mapping.mapping.as_ptr().cast())
    }

    fn max_address(&self) -> u64 {
        self.memory_mapping.mapping.len() as u64
    }

    fn expose_va(&self, address: u64, len: u64) -> Result<(), GuestMemoryBackingError> {
        // Lazily register the range with the kernel so kernel-mode accesses
        // work; a mapping without a registrar needs no registration.
        if let Some(registrar) = &self.memory_mapping.registrar {
            registrar
                .register(address, len)
                .map_err(|start| GuestMemoryBackingError::other(start, RegistrationError))
        } else {
            Ok(())
        }
    }

    fn base_iova(&self) -> Option<u64> {
        self.memory_mapping.iova_offset
    }

    fn access_bitmap(&self) -> Option<guestmem::BitmapInfo> {
        // When per-VTL permission bitmaps exist, the write bitmap is shared
        // across view types while the "read" bitmap depends on whether this
        // view models reads, kernel execute, or user execute. Otherwise fall
        // back to the partition-wide valid-memory bitmap (if any), which
        // gates reads and writes identically.
        if let Some(bitmaps) = self.memory_mapping.permission_bitmaps.as_ref() {
            match self.view_type {
                GuestMemoryViewReadType::Read => Some(guestmem::BitmapInfo {
                    read_bitmap: NonNull::new(bitmaps.read_bitmap.as_ptr().cast()).unwrap(),
                    write_bitmap: NonNull::new(bitmaps.write_bitmap.as_ptr().cast()).unwrap(),
                    bit_offset: 0,
                }),
                GuestMemoryViewReadType::KernelExecute => Some(guestmem::BitmapInfo {
                    read_bitmap: NonNull::new(bitmaps.kernel_execute_bitmap.as_ptr().cast())
                        .unwrap(),
                    write_bitmap: NonNull::new(bitmaps.write_bitmap.as_ptr().cast()).unwrap(),
                    bit_offset: 0,
                }),
                GuestMemoryViewReadType::UserExecute => Some(guestmem::BitmapInfo {
                    read_bitmap: NonNull::new(bitmaps.user_execute_bitmap.as_ptr().cast()).unwrap(),
                    write_bitmap: NonNull::new(bitmaps.write_bitmap.as_ptr().cast()).unwrap(),
                    bit_offset: 0,
                }),
            }
        } else {
            self.memory_mapping
                .valid_memory
                .as_ref()
                .map(|bitmap| bitmap.access_bitmap())
        }
    }

    fn page_fault(
        &self,
        address: u64,
        len: usize,
        write: bool,
        bitmap_failure: bool,
    ) -> guestmem::PageFaultAction {
        let gpn = address / PAGE_SIZE as u64;
        if !bitmap_failure {
            // The bitmap allowed the access but the page still faulted: it
            // simply is not mapped.
            guestmem::PageFaultAction::Fail(guestmem::PageFaultError::other(NotMapped {}))
        } else {
            let valid_memory = self
                .memory_mapping
                .valid_memory
                .as_ref()
                .expect("all backings with bitmaps should have a GuestValidMemory");
            if !valid_memory.check_valid(gpn) {
                // The page is not valid for this mapping's visibility type:
                // report the mismatch direction based on which mapping was
                // used.
                match valid_memory.memory_type() {
                    GuestValidMemoryType::Shared => {
                        tracing::warn!(
                            ?address,
                            ?len,
                            ?write,
                            "tried to access private page using shared mapping"
                        );
                        guestmem::PageFaultAction::Fail(guestmem::PageFaultError::new(
                            guestmem::GuestMemoryErrorKind::NotShared,
                            BitmapFailure::IncorrectHostVisibilityAccess,
                        ))
                    }
                    GuestValidMemoryType::Encrypted => {
                        tracing::warn!(
                            ?address,
                            ?len,
                            ?write,
                            "tried to access shared page using private mapping"
                        );
                        guestmem::PageFaultAction::Fail(guestmem::PageFaultError::new(
                            guestmem::GuestMemoryErrorKind::NotPrivate,
                            BitmapFailure::IncorrectHostVisibilityAccess,
                        ))
                    }
                }
            } else {
                // The page is valid; if permission bitmaps exist, check the
                // one that matches this access (write, or the view-specific
                // read/execute bitmap) for a VTL 1 protection violation.
                if let Some(permission_bitmaps) = &self.memory_mapping.permission_bitmaps {
                    let check_bitmap = if write {
                        &permission_bitmaps.write_bitmap
                    } else {
                        match self.view_type {
                            GuestMemoryViewReadType::Read => &permission_bitmaps.read_bitmap,
                            GuestMemoryViewReadType::KernelExecute => {
                                &permission_bitmaps.kernel_execute_bitmap
                            }
                            GuestMemoryViewReadType::UserExecute => {
                                &permission_bitmaps.user_execute_bitmap
                            }
                        }
                    };

                    if !check_bitmap.page_state(gpn) {
                        tracing::warn!(?address, ?len, ?write, ?self.view_type, "VTL 1 permissions violation");

                        return guestmem::PageFaultAction::Fail(guestmem::PageFaultError::new(
                            guestmem::GuestMemoryErrorKind::VtlProtected,
                            BitmapFailure::Vtl1ProtectionsViolation,
                        ));
                    }
                }

                // The bitmaps allow the access after all (e.g. the state
                // changed under us); retry it.
                guestmem::PageFaultAction::Retry
            }
        }
    }

    fn lock_gpns(&self, gpns: &[u64]) -> Result<bool, GuestMemoryBackingError> {
        // Returns true only when a protector actually locked the pages.
        if let Some(protector) = self.protector.as_ref() {
            protector.lock_gpns(self.vtl, gpns)?;
            Ok(true)
        } else {
            Ok(false)
        }
    }

    fn unlock_gpns(&self, gpns: &[u64]) {
        if let Some(protector) = self.protector.as_ref() {
            protector.unlock_gpns(self.vtl, gpns)
        }
    }
}
284
/// The visibility class a [`GuestValidMemory`] bitmap tracks.
#[derive(Debug, Copy, Clone)]
pub enum GuestValidMemoryType {
    /// Host-visible (shared) memory.
    Shared,
    /// Guest-private (encrypted) memory.
    Encrypted,
}
290
/// A partition-wide bitmap tracking which pages are currently valid for one
/// visibility type (shared or encrypted).
#[derive(Debug)]
pub struct GuestValidMemory {
    /// One bit per page; set when the page is valid for `memory_type`.
    valid_bitmap: GuestMemoryBitmap,
    /// Serializes writers of `valid_bitmap`; readers go lock-free.
    valid_bitmap_lock: Mutex<()>,
    /// Which visibility class this bitmap tracks.
    memory_type: GuestValidMemoryType,
}
299
300impl GuestValidMemory {
301 fn new(
302 memory_layout: &MemoryLayout,
303 memory_type: GuestValidMemoryType,
304 valid_bitmap_state: bool,
305 ) -> Result<Self, MappingError> {
306 let valid_bitmap = {
307 let mut bitmap = {
308 let last_entry = memory_layout
310 .ram()
311 .last()
312 .expect("memory map must have at least 1 entry");
313 let address_space_size = last_entry.range.end();
314 GuestMemoryBitmap::new(address_space_size as usize)?
315 };
316
317 for entry in memory_layout.ram() {
318 if entry.range.is_empty() {
319 continue;
320 }
321
322 bitmap.init(entry.range, valid_bitmap_state)?;
323 }
324
325 bitmap
326 };
327
328 Ok(GuestValidMemory {
329 valid_bitmap,
330 valid_bitmap_lock: Default::default(),
331 memory_type,
332 })
333 }
334
335 pub fn update_valid(&self, range: MemoryRange, state: bool) {
337 let _lock = self.valid_bitmap_lock.lock();
338 self.valid_bitmap.update(range, state);
339 }
340
341 pub(crate) fn check_valid(&self, gpn: u64) -> bool {
343 self.valid_bitmap.page_state(gpn)
344 }
345
346 pub(crate) fn memory_type(&self) -> GuestValidMemoryType {
348 self.memory_type
349 }
350
351 fn access_bitmap(&self) -> guestmem::BitmapInfo {
352 let ptr = NonNull::new(self.valid_bitmap.as_ptr()).unwrap();
353 guestmem::BitmapInfo {
354 read_bitmap: ptr,
355 write_bitmap: ptr,
356 bit_offset: 0,
357 }
358 }
359}
360
/// A sparse mapping of guest RAM into this process, with optional validity
/// and VTL 1 permission bitmaps and optional kernel registration.
#[derive(Debug, Inspect)]
pub struct GuestMemoryMapping {
    /// The VA mapping of guest RAM.
    #[inspect(skip)]
    mapping: SparseMapping,
    /// Offset to add to GPAs to form device IOVAs, when DMA is supported.
    iova_offset: Option<u64>,
    /// Partition-wide valid-page bitmap, when this mapping tracks validity.
    #[inspect(with = "Option::is_some")]
    valid_memory: Option<Arc<GuestValidMemory>>,
    /// Per-access-type VTL 1 permission bitmaps, when tracked.
    #[inspect(with = "Option::is_some")]
    permission_bitmaps: Option<PermissionBitmaps>,
    /// Registers ranges with the kernel on first use, when kernel access is
    /// required.
    registrar: Option<MemoryRegistrar<MshvVtlWithPolicy>>,
}
373
/// Per-access-type permission bitmaps used to enforce VTL 1 protections.
#[derive(Debug)]
struct PermissionBitmaps {
    /// Serializes permission updates across the four bitmaps.
    permission_update_lock: Mutex<()>,
    read_bitmap: GuestMemoryBitmap,
    write_bitmap: GuestMemoryBitmap,
    kernel_execute_bitmap: GuestMemoryBitmap,
    user_execute_bitmap: GuestMemoryBitmap,
}
384
/// Error querying VTL permissions on a mapping.
#[derive(Error, Debug)]
pub enum VtlPermissionsError {
    /// The mapping was built without permission bitmaps.
    #[error("no vtl 1 permissions enforcement, bitmap is not present")]
    NoPermissionsTracked,
}
390
/// A sparse one-bit-per-page bitmap over the guest address space.
#[derive(Debug)]
struct GuestMemoryBitmap {
    /// Backing storage; pages are committed lazily via `init`.
    bitmap: SparseMapping,
}
395
396impl GuestMemoryBitmap {
397 fn new(address_space_size: usize) -> Result<Self, MappingError> {
398 let bitmap = SparseMapping::new((address_space_size / PAGE_SIZE).div_ceil(8))
399 .map_err(MappingError::BitmapReserve)?;
400 bitmap
401 .map_zero(0, bitmap.len())
402 .map_err(MappingError::BitmapMap)?;
403 Ok(Self { bitmap })
404 }
405
406 fn init(&mut self, range: MemoryRange, state: bool) -> Result<(), MappingError> {
407 if range.start() % (PAGE_SIZE as u64 * 8) != 0 || range.end() % (PAGE_SIZE as u64 * 8) != 0
408 {
409 return Err(MappingError::BadAlignment(range));
410 }
411
412 let bitmap_start = range.start() as usize / PAGE_SIZE / 8;
413 let bitmap_end = (range.end() - 1) as usize / PAGE_SIZE / 8;
414 let bitmap_page_start = bitmap_start / PAGE_SIZE;
415 let bitmap_page_end = bitmap_end / PAGE_SIZE;
416 let page_count = bitmap_page_end + 1 - bitmap_page_start;
417
418 self.bitmap
422 .alloc(bitmap_page_start * PAGE_SIZE, page_count * PAGE_SIZE)
423 .map_err(MappingError::BitmapAlloc)?;
424
425 if state {
427 let start_gpn = range.start() / PAGE_SIZE as u64;
428 let gpn_count = range.len() / PAGE_SIZE as u64;
429 assert_eq!(range.start() % 8, 0);
430 assert_eq!(gpn_count % 8, 0);
431 self.bitmap
432 .fill_at(start_gpn as usize / 8, 0xff, gpn_count as usize / 8)
433 .unwrap();
434 }
435
436 Ok(())
437 }
438
439 fn update(&self, range: MemoryRange, state: bool) {
441 for gpn in range.start() / PAGE_SIZE as u64..range.end() / PAGE_SIZE as u64 {
442 let mut b = 0;
444 self.bitmap
445 .read_at(gpn as usize / 8, std::slice::from_mut(&mut b))
446 .unwrap();
447 if state {
448 b |= 1 << (gpn % 8);
449 } else {
450 b &= !(1 << (gpn % 8));
451 }
452 self.bitmap
453 .write_at(gpn as usize / 8, std::slice::from_ref(&b))
454 .unwrap();
455 }
456 }
457
458 fn page_state(&self, gpn: u64) -> bool {
461 let mut b = 0;
462 self.bitmap
463 .read_at(gpn as usize / 8, std::slice::from_mut(&mut b))
464 .unwrap();
465 b & (1 << (gpn % 8)) != 0
466 }
467
468 fn as_ptr(&self) -> *mut u8 {
469 self.bitmap.as_ptr().cast()
470 }
471}
472
/// Errors building a guest memory mapping or its bitmaps.
#[derive(Debug, Error)]
pub enum MappingError {
    #[error("failed to allocate VA space for guest memory")]
    Reserve(#[source] std::io::Error),
    #[error("failed to map guest memory pages")]
    Map(#[source] std::io::Error),
    #[error("failed to allocate VA space for bitmap")]
    BitmapReserve(#[source] std::io::Error),
    #[error("failed to map zero pages for bitmap")]
    BitmapMap(#[source] std::io::Error),
    #[error("failed to allocate pages for bitmap")]
    BitmapAlloc(#[source] std::io::Error),
    #[error("memory map entry {0} has insufficient alignment to support a bitmap")]
    BadAlignment(MemoryRange),
    #[error("failed to open device")]
    OpenDevice(#[source] hcl::ioctl::Error),
}
491
/// Builder for a [`GuestMemoryMapping`]; construct via
/// [`GuestMemoryMapping::builder`].
pub struct GuestMemoryMappingBuilder {
    /// Base added to GPAs to form file offsets into the mshv_vtl_low device.
    physical_address_base: u64,
    /// Partition-wide valid bitmap to attach, if any.
    valid_memory: Option<Arc<GuestValidMemory>>,
    /// Initial state for VTL 1 permission bitmaps; `None` disables them.
    permissions_bitmap_state: Option<bool>,
    /// Whether to map via the shared (host-visible) alias.
    shared: bool,
    /// Whether to register ranges with the kernel for kernel-mode access.
    for_kernel_access: bool,
    /// GPA-to-IOVA offset for device DMA, if supported.
    dma_base_address: Option<u64>,
    /// Whether kernel registration failures are tolerated.
    ignore_registration_failure: bool,
}
502
impl GuestMemoryMappingBuilder {
    /// Attaches the partition-wide valid-memory bitmap. Internal: callers go
    /// through [`GuestPartitionMemoryView`] so the bitmap is always the
    /// partition's own.
    fn use_partition_valid_memory(
        &mut self,
        valid_memory: Option<Arc<GuestValidMemory>>,
    ) -> &mut Self {
        self.valid_memory = valid_memory;
        self
    }

    /// Enables VTL 1 permission bitmaps seeded to `initial_state`; `None`
    /// (the default) builds the mapping without them.
    pub fn use_permissions_bitmaps(&mut self, initial_state: Option<bool>) -> &mut Self {
        self.permissions_bitmap_state = initial_state;
        self
    }

    /// Selects the shared (host-visible) alias of guest memory when true.
    pub fn shared(&mut self, is_shared: bool) -> &mut Self {
        self.shared = is_shared;
        self
    }

    /// When true, registers mapped ranges with the kernel so kernel-mode
    /// components can access them.
    pub fn for_kernel_access(&mut self, for_kernel_access: bool) -> &mut Self {
        self.for_kernel_access = for_kernel_access;
        self
    }

    /// Sets the base IOVA offset for device DMA; `None` means the mapping
    /// cannot be used for DMA.
    pub fn dma_base_address(&mut self, dma_base_address: Option<u64>) -> &mut Self {
        self.dma_base_address = dma_base_address;
        self
    }

    /// When true, failures to register memory with the kernel are logged
    /// and ignored rather than being fatal.
    pub fn ignore_registration_failure(&mut self, ignore: bool) -> &mut Self {
        self.ignore_registration_failure = ignore;
        self
    }

    /// Builds a mapping that tracks validity via the partition view's
    /// shared bitmap.
    pub fn build_with_bitmap(
        &mut self,
        mshv_vtl_low: &MshvVtlLow,
        partition_builder: &GuestPartitionMemoryView<'_>,
    ) -> Result<GuestMemoryMapping, MappingError> {
        partition_builder.build_guest_memory_mapping(mshv_vtl_low, self)
    }

    /// Builds a mapping with no validity bitmap attached.
    pub fn build_without_bitmap(
        &self,
        mshv_vtl_low: &MshvVtlLow,
        memory_layout: &MemoryLayout,
    ) -> Result<GuestMemoryMapping, MappingError> {
        self.build(mshv_vtl_low, memory_layout)
    }

    /// Maps every RAM range of `memory_layout` from the mshv_vtl_low device
    /// into a sparse VA region, builds permission bitmaps if requested, and
    /// sets up a kernel registrar if requested.
    fn build(
        &self,
        mshv_vtl_low: &MshvVtlLow,
        memory_layout: &MemoryLayout,
    ) -> Result<GuestMemoryMapping, MappingError> {
        // File offsets are GPAs plus the configured base; the shared alias
        // is selected by OR'ing in the device's shared-memory flag bit.
        let file_starting_offset = self.physical_address_base
            | if self.shared {
                MshvVtlLow::SHARED_MEMORY_FLAG
            } else {
                0
            };

        // Reserve VA to cover the address space up to the end of the highest
        // RAM range.
        let last_entry = memory_layout
            .ram()
            .last()
            .expect("memory map must have at least 1 entry");
        let address_space_size = last_entry.range.end();
        let mapping =
            SparseMapping::new(address_space_size as usize).map_err(MappingError::Reserve)?;

        tracing::trace!(?mapping, "map_lower_vtl_memory mapping");

        let mut permission_bitmaps = if self.permissions_bitmap_state.is_some() {
            Some(PermissionBitmaps {
                permission_update_lock: Default::default(),
                read_bitmap: GuestMemoryBitmap::new(address_space_size as usize)?,
                write_bitmap: GuestMemoryBitmap::new(address_space_size as usize)?,
                kernel_execute_bitmap: GuestMemoryBitmap::new(address_space_size as usize)?,
                user_execute_bitmap: GuestMemoryBitmap::new(address_space_size as usize)?,
            })
        } else {
            None
        };

        for entry in memory_layout.ram() {
            if entry.range.is_empty() {
                continue;
            }
            let base_addr = entry.range.start();
            let file_offset = file_starting_offset.checked_add(base_addr).unwrap();

            tracing::trace!(base_addr, file_offset, "mapping lower ram");

            mapping
                .map_file(
                    base_addr as usize,
                    entry.range.len() as usize,
                    mshv_vtl_low.get(),
                    file_offset,
                    true,
                )
                .map_err(MappingError::Map)?;

            // Seed each permission bitmap for this range with the requested
            // initial state.
            if let Some((bitmaps, state)) = permission_bitmaps
                .as_mut()
                .zip(self.permissions_bitmap_state)
            {
                bitmaps.read_bitmap.init(entry.range, state)?;
                bitmaps.write_bitmap.init(entry.range, state)?;
                bitmaps.kernel_execute_bitmap.init(entry.range, state)?;
                bitmaps.user_execute_bitmap.init(entry.range, state)?;
            }

            tracing::trace!(?entry, "mapped memory map entry");
        }

        // Kernel access requires registering ranges through a fresh VTL
        // device handle; registration itself happens lazily in expose_va.
        let registrar = if self.for_kernel_access {
            let mshv = Mshv::new().map_err(MappingError::OpenDevice)?;
            let mshv_vtl = mshv.create_vtl().map_err(MappingError::OpenDevice)?;
            Some(MemoryRegistrar::new(
                memory_layout,
                self.physical_address_base,
                MshvVtlWithPolicy {
                    mshv_vtl,
                    ignore_registration_failure: self.ignore_registration_failure,
                    shared: self.shared,
                },
            ))
        } else {
            None
        };

        Ok(GuestMemoryMapping {
            mapping,
            iova_offset: self.dma_base_address,
            valid_memory: self.valid_memory.clone(),
            permission_bitmaps,
            registrar,
        })
    }
}
693
694impl GuestMemoryMapping {
695 pub fn builder(physical_address_base: u64) -> GuestMemoryMappingBuilder {
701 GuestMemoryMappingBuilder {
702 physical_address_base,
703 valid_memory: None,
704 permissions_bitmap_state: None,
705 shared: false,
706 for_kernel_access: false,
707 dma_base_address: None,
708 ignore_registration_failure: false,
709 }
710 }
711
712 pub fn update_permission_bitmaps(&self, range: MemoryRange, flags: HvMapGpaFlags) {
715 if let Some(bitmaps) = self.permission_bitmaps.as_ref() {
716 let _lock = bitmaps.permission_update_lock.lock();
717 bitmaps.read_bitmap.update(range, flags.readable());
718 bitmaps.write_bitmap.update(range, flags.writable());
719 bitmaps
720 .kernel_execute_bitmap
721 .update(range, flags.kernel_executable());
722 bitmaps
723 .user_execute_bitmap
724 .update(range, flags.user_executable());
725 }
726 }
727
728 pub fn query_access_permission(&self, gpn: u64) -> Result<HvMapGpaFlags, VtlPermissionsError> {
731 if let Some(bitmaps) = self.permission_bitmaps.as_ref() {
732 Ok(HvMapGpaFlags::new()
733 .with_readable(bitmaps.read_bitmap.page_state(gpn))
734 .with_writable(bitmaps.write_bitmap.page_state(gpn))
735 .with_kernel_executable(bitmaps.kernel_execute_bitmap.page_state(gpn))
736 .with_user_executable(bitmaps.user_execute_bitmap.page_state(gpn)))
737 } else {
738 Err(VtlPermissionsError::NoPermissionsTracked)
739 }
740 }
741
742 pub(crate) fn zero_range(
744 &self,
745 range: MemoryRange,
746 ) -> Result<(), sparse_mmap::SparseMappingError> {
747 self.mapping
748 .fill_at(range.start() as usize, 0, range.len() as usize)
749 }
750}