openhcl_boot/arch/x86_64/memory.rs

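//! Memory acceptance and VTL protection setup for the bootshim.
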
use super::address_space::LocalMap;
use super::address_space::init_local_map;
use crate::ShimParams;
use crate::arch::TdxHypercallPage;
use crate::arch::x86_64::address_space::tdx_share_large_page;
use crate::host_params::PartitionInfo;
use crate::host_params::shim_params::IsolationType;
use crate::hypercall::hvcall;
use memory_range::MemoryRange;
use sha2::Digest;
use sha2::Sha384;
use x86defs::X64_LARGE_PAGE_SIZE;
use x86defs::tdx::TDX_SHARED_GPA_BOUNDARY_ADDRESS_BIT;

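/// Accepts VTL2 memory and applies VTL protections, as required by the
/// partition's isolation type.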
pub fn setup_vtl2_memory(shim_params: &ShimParams, partition_info: &PartitionInfo) {
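    // Nothing to do when the partition is not isolated; memory needs no
    // acceptance or protection from the bootshim.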
    if let IsolationType::None = shim_params.isolation_type {
        return;
    }

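    // On VBS-isolated partitions, enable VTL protection via the VSM
    // partition configuration register.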
    if let IsolationType::Vbs = shim_params.isolation_type {
        let vsm_config = hvdef::HvRegisterVsmPartitionConfig::new()
            .with_default_vtl_protection_mask(0xF)
            .with_enable_vtl_protection(true);

        hvcall()
            .set_register(
                hvdef::HvX64RegisterName::VsmPartitionConfig.into(),
                hvdef::HvRegisterValue::from(u64::from(vsm_config)),
            )
            .expect("setting vsm config shouldn't fail");

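        // Apply VTL2 protections to the portions of VTL2 RAM that the loader
        // already accepted on our behalf.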
        let accepted_ranges = shim_params
            .imported_regions()
            .filter_map(|(imported_range, already_accepted)| {
                already_accepted.then_some(imported_range)
            });
        for range in memory_range::overlapping_ranges(
            partition_info.vtl2_ram.iter().map(|entry| entry.range),
            accepted_ranges,
        ) {
            hvcall()
                .apply_vtl2_protections(range)
                .expect("applying vtl 2 protections cannot fail");
        }
    }

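    // SNP and TDX need a local map for temporary page mappings; VBS and
    // non-isolated partitions do not.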
    let mut local_map = match shim_params.isolation_type {
        IsolationType::Snp | IsolationType::Tdx => Some(init_local_map(
            loader_defs::paravisor::PARAVISOR_LOCAL_MAP_VA,
        )),
        IsolationType::None | IsolationType::Vbs => None,
    };

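    // Sanity check: imported regions must be non-overlapping and sorted in
    // increasing address order.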
    let mut last_range_end = None;
    for (imported_range, _) in shim_params.imported_regions() {
        assert!(last_range_end.is_none() || imported_range.start() > last_range_end.unwrap());
        last_range_end = Some(imported_range.end() - hvdef::HV_PAGE_SIZE);
    }

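    // Accept the VTL2 RAM that was not imported, as the loader has not
    // already accepted it.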
    for range in memory_range::subtract_ranges(
        partition_info.vtl2_ram.iter().map(|e| e.range),
        shim_params.imported_regions().map(|(r, _)| r),
    ) {
        accept_vtl2_memory(shim_params, &mut local_map, range);
    }

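    // Set up the bounce buffer, if one was provided: accept any of its pages
    // that are not already part of VTL2 RAM, then expose it as a byte slice.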
    let ram_buffer = if let Some(bounce_buffer) = shim_params.bounce_buffer {
        assert!(bounce_buffer.start() % X64_LARGE_PAGE_SIZE == 0);
        assert!(bounce_buffer.len() >= X64_LARGE_PAGE_SIZE);

        for range in memory_range::subtract_ranges(
            core::iter::once(bounce_buffer),
            partition_info.vtl2_ram.iter().map(|e| e.range),
        ) {
            accept_vtl2_memory(shim_params, &mut local_map, range);
        }

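        // SAFETY: The bounce buffer is now accepted memory, reserved for the
        // bootshim's exclusive use.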
        unsafe {
            core::slice::from_raw_parts_mut(
                bounce_buffer.start() as *mut u8,
                bounce_buffer.len() as usize,
            )
        }
    } else {
        &mut []
    };

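    // Accept the imported regions that are still pending, preserving their
    // contents through the bounce buffer.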
    for (imported_range, already_accepted) in shim_params.imported_regions() {
        if !already_accepted {
            accept_pending_vtl2_memory(shim_params, &mut local_map, ram_buffer, imported_range);
        }
    }

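    // On TDX, reuse the start of the now-free bounce buffer as the shared
    // page for hypercall I/O.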
    if shim_params.isolation_type == IsolationType::Tdx {
        let free_buffer = ram_buffer.as_mut_ptr() as u64;
        assert!(free_buffer % X64_LARGE_PAGE_SIZE == 0);
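        // SAFETY: The buffer is large-page aligned and no longer in use, so
        // it can be shared with the hypervisor for hypercall I/O.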
        let tdx_io_page = unsafe {
            tdx_share_large_page(free_buffer);
            TdxHypercallPage::new(free_buffer)
        };
        hvcall().initialize_tdx(tdx_io_page);
    }
}

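/// Accepts VTL2 memory in the given range, using the acceptance mechanism
/// appropriate for the isolation type.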
fn accept_vtl2_memory(
    shim_params: &ShimParams,
    local_map: &mut Option<LocalMap<'_>>,
    range: MemoryRange,
) {
    match shim_params.isolation_type {
        IsolationType::Vbs => {
            hvcall()
                .accept_vtl2_pages(range, hvdef::hypercall::AcceptMemoryType::RAM)
                .expect("accepting vtl 2 memory must not fail");
        }
        IsolationType::Snp => {
            super::snp::set_page_acceptance(local_map.as_mut().unwrap(), range, true)
                .expect("accepting vtl 2 memory must not fail");
        }
        IsolationType::Tdx => {
            super::tdx::accept_pages(range).expect("accepting vtl2 memory must not fail")
        }
        _ => unreachable!(),
    }
}

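/// Accepts VTL2 memory whose current contents must be preserved across
/// acceptance, bouncing each chunk through `ram_buffer`.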
fn accept_pending_vtl2_memory(
    shim_params: &ShimParams,
    local_map: &mut Option<LocalMap<'_>>,
    ram_buffer: &mut [u8],
    range: MemoryRange,
) {
    let isolation_type = shim_params.isolation_type;

    match isolation_type {
        IsolationType::Vbs => {
            hvcall()
                .accept_vtl2_pages(range, hvdef::hypercall::AcceptMemoryType::RAM)
                .expect("accepting vtl 2 memory must not fail");
        }
        IsolationType::Snp | IsolationType::Tdx => {
            let local_map = local_map.as_mut().unwrap();
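            // The contents must be copied out while the pages are still
            // shared, then the pages converted to private and accepted, then
            // the contents copied back. Work in chunks of at most one large
            // page, the guaranteed minimum size of the bounce buffer.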
            let mut remaining = range;
            while !remaining.is_empty() {
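                // Clamp this chunk to the next large-page boundary.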
                let range = MemoryRange::new(
                    remaining.start()
                        ..remaining.end().min(
                            (remaining.start() + X64_LARGE_PAGE_SIZE) & !(X64_LARGE_PAGE_SIZE - 1),
                        ),
                );
                remaining = MemoryRange::new(range.end()..remaining.end());

                let ram_buffer = &mut ram_buffer[..range.len() as usize];

                {
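                    // Copy the chunk's contents out to the bounce buffer
                    // while the pages are still shared. On TDX, shared pages
                    // are addressed above the shared GPA boundary.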
                    let map_range = if isolation_type == IsolationType::Tdx {
                        MemoryRange::new(
                            range.start() | TDX_SHARED_GPA_BOUNDARY_ADDRESS_BIT
                                ..range.end() | TDX_SHARED_GPA_BOUNDARY_ADDRESS_BIT,
                        )
                    } else {
                        range
                    };

                    let mapping = local_map.map_pages(map_range, false);
                    ram_buffer.copy_from_slice(mapping.data);
                }

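                // Convert the chunk from shared to private.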
                match isolation_type {
                    IsolationType::Snp => {
                        super::snp::Ghcb::change_page_visibility(range, false);
                    }
                    IsolationType::Tdx => {
                        super::tdx::change_page_visibility(range, false);
                    }
                    _ => unreachable!(),
                }

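                // Accept the now-private pages.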
                match isolation_type {
                    IsolationType::Snp => {
                        super::snp::set_page_acceptance(local_map, range, true)
                            .expect("accepting vtl 2 memory must not fail");
                    }
                    IsolationType::Tdx => {
                        super::tdx::accept_pages(range)
                            .expect("accepting vtl 2 memory must not fail");
                    }
                    _ => unreachable!(),
                }

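                // Copy the preserved contents back into the accepted pages.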
                {
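                    // SAFETY: The range is accepted, private VTL2 memory,
                    // identity-mapped and owned by the bootshim.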
                    let mapping = unsafe {
                        core::slice::from_raw_parts_mut(
                            range.start() as *mut u8,
                            range.len() as usize,
                        )
                    };

                    mapping.copy_from_slice(ram_buffer);
                }
            }
        }
        _ => unreachable!(),
    }
}

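/// Verifies the SHA-384 hash of imported regions that were not already
/// accepted, panicking if it does not match the expected hash.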
pub fn verify_imported_regions_hash(shim_params: &ShimParams) {
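    // Non-isolated partitions do not require verification.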
    if let IsolationType::None = shim_params.isolation_type {
        return;
    }

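    // If all imported regions were already accepted, there is nothing left
    // to verify.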
    if shim_params
        .imported_regions()
        .all(|(_, already_accepted)| already_accepted)
    {
        return;
    }

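    // Hash the regions that were imported but not already accepted.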
    let mut hasher = Sha384::new();
    shim_params
        .imported_regions()
        .filter(|(_, already_accepted)| !already_accepted)
        .for_each(|(range, _)| {
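            // SAFETY: The imported region is accepted VTL2 memory,
            // identity-mapped and readable.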
            let mapping = unsafe {
                core::slice::from_raw_parts(range.start() as *const u8, range.len() as usize)
            };
            hasher.update(mapping);
        });

    if hasher.finalize().as_slice() != shim_params.imported_regions_hash() {
        panic!("Imported regions hash mismatch");
    }
}