// sidecar/arch/x86_64/temporary_map.rs

use super::addr_space;
use core::ops::Deref;
use core::ops::DerefMut;
use core::sync::atomic::Ordering::SeqCst;
use core::sync::atomic::compiler_fence;
use x86defs::Pte;

const PAGE_SIZE: usize = 0x1000;
const PAGE_MASK: usize = 0xfff;
/// Owns one slot of the temporary mapping region: a fixed virtual page and
/// the page-table entry that controls which physical page backs it.
pub struct Mapper {
    // Virtual address of this slot's page in the temporary map region.
    ptr: *mut (),
    // The PTE backing `ptr`; rewritten by `map` and cleared on guard drop.
    pte: &'static mut Pte,
}
21
22pub struct TemporaryMap<'a, T>(&'a mut T, &'a mut Mapper);
23
24impl<T> Deref for TemporaryMap<'_, T> {
25 type Target = T;
26
27 fn deref(&self) -> &Self::Target {
28 self.0
29 }
30}
31
32impl<T> DerefMut for TemporaryMap<'_, T> {
33 fn deref_mut(&mut self) -> &mut Self::Target {
34 self.0
35 }
36}
37
impl Mapper {
    /// Creates a mapper for temporary-map slot `index`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee exclusive ownership of slot `index`: no
    /// other `Mapper` may exist for the same index, since both the slot's
    /// PTE and its virtual page are mutated through this object.
    pub unsafe fn new(index: usize) -> Self {
        // SAFETY: by the caller's contract this slot's PTE is exclusively
        // owned here, so taking a `&'static mut` to it is sound.
        let pte = unsafe { &mut *addr_space::temp_ptes().add(index) };
        Self {
            pte,
            // Each slot occupies one page within the temporary map region.
            ptr: (addr_space::temporary_map() + index * PAGE_SIZE) as *mut (),
        }
    }

    /// Maps the physical page containing `pa` into this slot and returns a
    /// guard that dereferences to a `T` located at `pa`. The mapping is
    /// torn down (PTE cleared, TLB entry invalidated) when the guard drops.
    ///
    /// Panics if the `T` would straddle a page boundary, or if the slot is
    /// already mapped (a previous guard was leaked).
    ///
    /// # Safety
    ///
    /// The caller must ensure `pa` refers to memory that is valid to access
    /// as a `T` for the lifetime of the returned guard, with no aliasing
    /// access while it is mapped.
    #[track_caller]
    pub unsafe fn map<T>(&mut self, pa: u64) -> TemporaryMap<'_, T> {
        // The object must fit entirely within the single mapped page.
        assert!((pa as usize & PAGE_MASK) + size_of::<T>() <= PAGE_SIZE);
        // The slot must currently be unmapped.
        assert!(!self.pte.present());
        *self.pte = Pte::new()
            .with_address(pa & !(PAGE_MASK as u64))
            .with_present(true)
            .with_read_write(true);
        // Keep the PTE write ordered before the access through `ptr` below.
        compiler_fence(SeqCst);
        // SAFETY: the PTE now maps the page containing `pa`, and the caller
        // guarantees that memory is valid to access as a `T`.
        let ptr = unsafe { &mut *self.ptr.byte_add(pa as usize & PAGE_MASK).cast() };
        TemporaryMap(ptr, self)
    }
}
74
impl<T> Drop for TemporaryMap<'_, T> {
    // Unmaps the page: clears the PTE, then invalidates the TLB entry so a
    // later `map` cannot observe a stale translation.
    fn drop(&mut self) {
        // Keep any final access through the mapping ordered before the PTE
        // clear below.
        compiler_fence(SeqCst);
        *self.1.pte = Pte::new();
        // SAFETY: `invlpg` on this slot's virtual address only flushes the
        // local TLB entry for the just-cleared mapping; it has no other
        // side effects.
        unsafe {
            core::arch::asm! {
                "invlpg [{0}]",
                in(reg) self.1.ptr,
            }
        }
    }
}