sidecar/arch/x86_64/temporary_map.rs

1// Copyright (c) Microsoft Corporation.
2// Licensed under the MIT License.
3
4//! Code to temporarily map a physical address to a virtual address, during
5//! kernel start.
6
7use super::addr_space;
8use core::ops::Deref;
9use core::ops::DerefMut;
10use core::sync::atomic::Ordering::SeqCst;
11use core::sync::atomic::compiler_fence;
12use x86defs::Pte;
13
/// Size in bytes of a 4 KiB x86-64 page.
const PAGE_SIZE: usize = 0x1000;
/// Mask selecting the offset-within-page bits of an address.
const PAGE_MASK: usize = 0xfff;
16
/// A temporary-mapping slot: a page-sized virtual address window plus the
/// page table entry that controls what physical page it maps.
pub struct Mapper {
    // Base virtual address of this slot's page-sized mapping window.
    ptr: *mut (),
    // The PTE in the temporary-map page table that backs `ptr`.
    pte: &'static mut Pte,
}
21
22pub struct TemporaryMap<'a, T>(&'a mut T, &'a mut Mapper);
23
24impl<T> Deref for TemporaryMap<'_, T> {
25    type Target = T;
26
27    fn deref(&self) -> &Self::Target {
28        self.0
29    }
30}
31
32impl<T> DerefMut for TemporaryMap<'_, T> {
33    fn deref_mut(&mut self) -> &mut Self::Target {
34        self.0
35    }
36}
37
impl Mapper {
    /// Creates a mapper that owns temporary-mapping slot `index`.
    ///
    /// # Safety
    ///
    /// The caller must ensure there is only one `Mapper` for a given
    /// `index` at a time.
    pub unsafe fn new(index: usize) -> Self {
        // SAFETY: the caller guarantees that we have unique access.
        let pte = unsafe { &mut *addr_space::temp_ptes().add(index) };
        Self {
            pte,
            // Each slot gets its own page-sized window in the temporary
            // mapping region of the virtual address space.
            ptr: (addr_space::temporary_map() + index * PAGE_SIZE) as *mut (),
        }
    }

    /// Maps the given physical address into virtual address space for the
    /// lifetime of the return value.
    ///
    /// `pa` need not be page aligned, but the `T` object starting at `pa`
    /// must not cross a page boundary, since the mapping window is a single
    /// page.
    ///
    /// # Panics
    ///
    /// Panics if the object at `pa` would extend past the end of its page,
    /// or if this slot already has an active mapping.
    ///
    /// # Safety
    /// The caller must ensure that the object being mapped is a valid `T`
    /// before they dereference it.
    #[track_caller]
    pub unsafe fn map<T>(&mut self, pa: u64) -> TemporaryMap<'_, T> {
        // The object must fit within the single page this slot can map.
        assert!((pa as usize & PAGE_MASK) + size_of::<T>() <= PAGE_SIZE);
        // A present PTE would mean a previous mapping was never dropped.
        assert!(!self.pte.present());
        *self.pte = Pte::new()
            .with_address(pa & !(PAGE_MASK as u64))
            .with_present(true)
            .with_read_write(true);
        // Keep the compiler from reordering the access below before the PTE
        // write. No TLB flush is needed here: the slot's TLB entry is
        // invalidated (invlpg) whenever a mapping is dropped.
        compiler_fence(SeqCst);
        // SAFETY: the caller guarantees that the physical address is valid.
        let ptr = unsafe { &mut *self.ptr.byte_add(pa as usize & PAGE_MASK).cast() };
        TemporaryMap(ptr, self)
    }
}
74
impl<T> Drop for TemporaryMap<'_, T> {
    fn drop(&mut self) {
        // Keep the compiler from reordering accesses through the mapping
        // after the PTE is cleared below.
        compiler_fence(SeqCst);
        // Unmap the page by clearing the slot's PTE.
        *self.1.pte = Pte::new();
        // SAFETY: invalidating the previous mapping. This has no safety
        // requirements.
        unsafe {
            core::arch::asm! {
                "invlpg [{0}]",
                in(reg) self.1.ptr,
            }
        }
    }
}