1use super::VpContextBuilder;
7use super::VpContextState;
8use crate::vp_context_builder::VpContextPageState;
9use igvm_defs::PAGE_SIZE_4K;
10use loader::importer::SegmentRegister;
11use loader::importer::X86Register;
12use loader_defs::shim::TdxTrampolineContext;
13use std::mem::offset_of;
14use x86defs::X64_EFER_LME;
15use x86defs::X86X_MSR_DEFAULT_PAT;
16use zerocopy::IntoBytes;
17
/// Builder for the initial VP (virtual processor) state of a TDX guest.
///
/// Registers imported through the `VpContextBuilder` trait are accumulated in
/// a [`TdxTrampolineContext`]; `finalize` then emits a single page at the
/// architectural reset vector containing that context plus hand-assembled
/// trampoline code that transitions processors from 32-bit reset state into
/// long mode and dispatches them to the configured entry point.
#[derive(Debug)]
pub struct TdxHardwareContext {
    // Register state and static GDT embedded at the start of the reset page;
    // the generated trampoline code reads these fields via absolute (32-bit
    // mode) or RIP-relative (64-bit mode) addressing.
    trampoline_context: TdxTrampolineContext,
    // When true, the generated trampoline also loops over the low 1MB issuing
    // tdcall leaf 6 (presumably TDG.MEM.PAGE.ACCEPT — confirm against the TDX
    // module spec) on the processor whose esi is zero.
    accept_lower_1mb: bool,
}
25
26impl TdxHardwareContext {
27 pub fn new(accept_lower_1mb: bool) -> Self {
28 Self {
29 trampoline_context: TdxTrampolineContext::default(),
30 accept_lower_1mb,
31 }
32 }
33}
34
impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    /// Captures one register's initial value into the trampoline context.
    ///
    /// Registers the trampoline cannot or need not set are handled specially:
    /// EFER and PAT are only validated (they are not stored in the context —
    /// presumably fixed elsewhere for TDX; confirm against the platform spec),
    /// R12 and RFLAGS panic, and MTRRs are ignored with a warning.
    fn import_vp_register(&mut self, register: X86Register) {
        // All data segment registers (ds/es/fs/gs/ss) must share one selector,
        // because the generated trampoline loads all of them from the single
        // `data_selector` field.
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
            X86Register::Cr3(cr3) => {
                // CR3 is stored twice: truncated to 32 bits for the 32-bit
                // transition code (which can only load a 32-bit value), and in
                // full for the reload once in 64-bit mode.
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
                // Long mode is mandatory; the value itself is not stored.
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
                // Only the default PAT is supported; the value is not stored.
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    /// TDX does not use caller-specified context memory; the context always
    /// lives in the fixed reset-vector page emitted by `finalize`.
    fn set_vp_context_memory(&mut self, _page_base: u64) {
        unimplemented!("not supported for TDX");
    }

    /// Emits the finalized VP context as one 4K page at the architectural
    /// reset vector (page number 0xFFFFF, i.e. GPA 0xFFFFF000).
    ///
    /// Page layout:
    /// - offset 0x000: the serialized `TdxTrampolineContext`,
    /// - immediately after it: generated transition code,
    /// - offset 0xFF0 (where the CPU starts at 0xFFFFFFF0): `jmp rel32` to
    ///   the generated code,
    /// - all other bytes: 0xCC (int3) filler to trap stray execution.
    ///
    /// Forward-jump displacements are emitted as placeholder bytes and
    /// patched in once the target offset is known.
    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
        // Build a minimal static GDT in the context. Bytes 0x08..0x0F form
        // descriptor #1 (selector 0x08): limit 0xFFFF, access 0x9B (present,
        // code, readable, accessed), flags 0xA0 (granularity + long-mode bit)
        // — a 64-bit code segment used for the far jump into long mode.
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;

        // Limit 0xF covers the two 8-byte descriptors. The base is the
        // absolute address of the GDT within the reset page.
        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;

        // 0xFF0 is the reset vector's offset within this page.
        let mut byte_offset = 0xFF0;

        // Fill the whole page with int3 so any unintended execution traps.
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

        // The context occupies the start of the page.
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        // Helper: copy raw instruction bytes at `byte_offset`, returning the
        // offset just past them.
        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };

        // At the reset vector: jmp rel32 to the generated code (which begins
        // right after the context). rel32 is relative to the end of the
        // 5-byte instruction, hence byte_offset + 4.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Generated code starts immediately after the context bytes.
        byte_offset = trampoline_context.len();

        // NOTE(review): `l0_offset` is the offset of the *first* generated
        // instruction, so `l0_offset - 1` (written below after the 64-bit
        // lgdt) is the last byte of the copied context struct, and no emitted
        // jcc has its displacement byte there. That store looks like it either
        // is dead or clobbers context data — verify against the intended
        // assembly listing.
        let l0_offset = byte_offset;

        // test esi, esi — esi distinguishes the boot processor (zero) from
        // APs (nonzero), per the jump polarity below.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // jz <past mailbox loop> (displacement byte reserved, patched below):
        // the esi==0 processor skips the mailbox wait entirely.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let mailbox_end = byte_offset;
        let mailbox_begin = byte_offset;

        // Mailbox wait loop (APs): spin until released.
        // xor eax, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xC0]);

        // mov ax, [mailbox_command] — 32-bit mode, disp32 is an absolute
        // address into this page at 0xFFFFF000.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8b, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, mailbox_command) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov dx, 1 — the "wakeup" command value.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0xba, 0x01, 0x00]);

        // cmp ax, dx
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x39, 0xd0]);

        // jnz <loop start> — keep spinning until command == 1 (backward rel8).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] = (mailbox_begin.wrapping_sub(byte_offset)) as u8;

        // cmp esi, [mailbox_apic_id] — is this wakeup addressed to us?
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3b, 0x35]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, mailbox_apic_id) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // jnz <loop start> — not for us, keep waiting.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] = (mailbox_begin.wrapping_sub(byte_offset)) as u8;

        // Patch the initial jz to land here, just past the mailbox loop.
        reset_page[mailbox_end.wrapping_sub(1)] = (byte_offset.wrapping_sub(mailbox_end)) as u8;

        // lgdt [static_gdt_limit] (0F 01 /2) — load the temporary GDT built
        // above so the far jump below can reference selector 0x08.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov eax, [cr4]; mov cr4, eax — imported CR4 value.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

        // mov eax, [transition_cr3]; mov cr3, eax — 32-bit truncated CR3 for
        // the mode transition; the full value is reloaded in 64-bit mode.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov eax, [cr0]; mov cr0, eax — enables paging/protection, which
        // activates long mode (LME was validated at import time).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

        // jmp far 0x08:<next instruction> (EA ptr16:32). The operand is a
        // 4-byte absolute offset followed by a 2-byte selector (hence +6 to
        // target the instruction after this one). Selector 0x08 is the 64-bit
        // code descriptor in the static GDT.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);

        // --- 64-bit mode from here on; disp32 operands are RIP-relative.
        // Displacement = field offset - end of instruction (the common
        // 0xFFFFF000 page base cancels out).

        // mov rax, [rip + cr3]; mov cr3, rax — reload the full 64-bit CR3.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov ax, [rip + gdtr_limit] — check whether a guest GDT was imported.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz L4 — no guest GDT: skip GDT/IDT/segment/TR loading entirely
        // (patched after the ltr sequence below).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;

        // lgdt [rip + gdtr_limit] — load the guest-provided GDT.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // NOTE(review): see the note at `l0_offset` — this writes into the
        // last byte of the embedded context, and no emitted jump reads it.
        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

        // mov ax, [rip + idtr_limit] — check whether a guest IDT was imported.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <past lidt> (patched below).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // lidt [rip + idtr_limit] (0F 01 /3).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // mov ax, [rip + data_selector], then load every data segment
        // register from it: ss, ds, es, fs, gs (8E /r).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ss, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        // mov ds, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        // mov es, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        // mov fs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        // mov gs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

        // mov ax, [rip + task_selector] — optional TSS selector.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <past ltr> — skip if no task selector was imported.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // ltr ax (0F 00 /3)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // L4: patch the "no guest GDT" jump to land here.
        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

        if self.accept_lower_1mb {
            // Optionally accept the low 1MB of memory, 4K page at a time.
            // test esi, esi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            // jnz L3 — only the esi==0 processor runs the accept loop.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            // xor ecx, ecx — rcx = current GPA, starting at 0.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            // xor edx, edx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            // mov edi, 0x100000 — loop bound (1MB).
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

            // Loop head: accept the page at rcx.
            let jump_offset = byte_offset;

            // mov eax, 6 — tdcall leaf 6 (presumably TDG.MEM.PAGE.ACCEPT;
            // confirm against the TDX module ABI).
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            // tdcall (66 0F 01 CC)
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

            // test rax, rax — nonzero means the tdcall failed.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            // jnz rel32 to page offset 0xFEF, which still holds the 0xCC
            // filler — a failed accept traps on int3.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            // NOTE(review): this shadowing `relative_offset` is usize, so
            // `as_bytes()` yields 8 bytes; the high 4 bytes spill past the
            // rel32 slot but are immediately overwritten by the next
            // instruction, so the emitted bytes come out correct.
            let relative_offset = 0xFEF - byte_offset;
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            // add ecx, 0x1000 — advance to the next 4K page.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            // cmp ecx, edi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            // jb <loop head> (backward rel8).
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            // L3: patch the AP skip over the accept loop.
            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

        // test esi, esi — APs take their start address from the mailbox; the
        // esi==0 processor keeps the imported initial_rip.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // jz L7 — skip the mailbox handoff when esi == 0.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l7_offset = byte_offset;

        // xor rax, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x31, 0xC0]);

        // mov [rip + mailbox_command], ax — clear the command to acknowledge
        // the wakeup.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rax, [rip + mailbox_wakeup_vector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8b, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_wakeup_vector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov [rip + initial_rip], rax — APs start at the wakeup vector.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // L7:
        reset_page[l7_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l7_offset)) as u8;

        // Load the remaining imported GPRs.
        // mov rsp, [rip + rsp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rbp, [rip + rbp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ecx, esi — hand the processor identifier to the entry point.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        // mov rsi, [rip + rsi]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r8, [rip + r8]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r9, [rip + r9]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r10, [rip + r10]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r11, [rip + r11]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ax, [rip + code_selector] — was an explicit CS imported?
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <past far jump> — no code selector: use a near jump instead.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // jmp far [rip + initial_rip] (48 FF /5, m16:64): loads an 8-byte
        // offset plus a 2-byte selector from memory — assumes the selector
        // field directly follows initial_rip in TdxTrampolineContext; TODO
        // confirm the struct layout.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // jmp [rip + initial_rip] (FF /4, near indirect) — final dispatch to
        // the entry point without changing CS.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Emit the finished page at page number 0xFFFFF (GPA 0xFFFFF000).
        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}