use super::VpContextBuilder;
use super::VpContextState;
use crate::vp_context_builder::VpContextPageState;
use igvm_defs::PAGE_SIZE_4K;
use loader::importer::SegmentRegister;
use loader::importer::X86Register;
use loader_defs::shim::TdxTrampolineContext;
use std::mem::offset_of;
use x86defs::X64_EFER_LME;
use x86defs::X86X_MSR_DEFAULT_PAT;
use zerocopy::IntoBytes;

/// TDX VP context builder that emits a reset-vector trampoline page for TDX
/// guests.
#[derive(Debug)]
pub struct TdxHardwareContext {
    trampoline_context: TdxTrampolineContext,
    accept_lower_1mb: bool,
}

impl TdxHardwareContext {
    pub fn new(accept_lower_1mb: bool) -> Self {
        Self {
            trampoline_context: TdxTrampolineContext::default(),
            accept_lower_1mb,
        }
    }
}

impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    fn import_vp_register(&mut self, register: X86Register) {
        // The trampoline context stores a single data selector, so all data
        // segment registers must use the same selector.
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
            X86Register::Cr3(cr3) => {
                // CR3 is first loaded by 32-bit code before the switch to long
                // mode, so it must also fit in a u32.
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
                // EFER is not part of the trampoline context; just validate
                // that long mode is enabled.
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
                // PAT is likewise not part of the trampoline context; only the
                // architectural default is supported.
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    fn set_vp_context_memory(&mut self, _page_base: u64) {
        unimplemented!("not supported for TDX");
    }

    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
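        // Build a temporary GDT containing a single 64-bit code segment at
        // selector 0x08 (limit 0xFFFF, base 0, type 0x9B, G and L set). It is
        // used for the far jump into long mode.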
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;

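        // Point the static GDTR at the temporary GDT within the trampoline
        // context; the reset page is mapped at GPA 0xFFFFF000.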
        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;

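        // The trampoline entry code is emitted at the architectural reset
        // vector, which occupies the last 16 bytes of the page (offset 0xFF0).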
        let mut byte_offset = 0xFF0;

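        // Fill the page with int3 (0xCC) so that any stray execution traps.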
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

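        // Copy the trampoline context to the start of the reset page so the
        // code below can address its fields.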
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        // Helper that appends raw instruction bytes at the given offset and
        // returns the offset just past them.
        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };

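        // At the reset vector there is only room for a jump: emit `jmp rel32`
        // to the trampoline code that immediately follows the context.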
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = trampoline_context.len();

        let l0_offset = byte_offset;

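        // Still in 32-bit protected mode: load the temporary GDT using its
        // fixed physical address.
        // lgdt [static_gdt_limit]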
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

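        // mov eax, [cr4]; mov cr4, eax -- load the low 32 bits of the
        // requested CR4 (PAE must be set for long mode).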
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

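        // mov eax, [transition_cr3]; mov cr3, eax -- load the 32-bit
        // transition page table root.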
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

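        // mov eax, [cr0]; mov cr0, eax -- enabling paging here activates long
        // mode, since EFER.LME is already set.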
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

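        // jmp far 0008:<next instruction> -- jump through the temporary GDT's
        // 64-bit code segment to start executing 64-bit code.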
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);

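        // Now executing 64-bit code with RIP-relative addressing.
        // mov rax, [rel cr3]; mov cr3, rax -- reload CR3 with the full 64-bit
        // value.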
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

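        // If a GDT was supplied (gdtr_limit != 0), load it along with the IDT,
        // data selectors, and task register; otherwise skip that entire block
        // (the jz below is patched via l4_offset).
        // mov ax, [rel gdtr_limit]; test ax, ax; jz <past selector loads>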
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;

        // lgdt [rel gdtr_limit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

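        // If an IDT was supplied (idtr_limit != 0), load it.
        // mov ax, [rel idtr_limit]; test ax, ax; jz <past lidt>; lidt [rel idtr_limit]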
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

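        // Load the data selector into ss, ds, es, fs, and gs.
        // mov ax, [rel data_selector]; mov ss/ds/es/fs/gs, ax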
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

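        // If a task selector was supplied, load the task register.
        // mov ax, [rel task_selector]; test ax, ax; jz <past ltr>; ltr ax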
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // Patch the "no GDT supplied" branch to land here, past all selector
        // loads.
        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

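        // ESI holds the VP index in the TDX initial register state. The BSP
        // (index 0) skips the mailbox wait; APs wait to be woken up.
        // test esi, esi; jz <skip mailbox wait>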
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let skip_mailbox_for_bsp = byte_offset;

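        // Discover this AP's x2APIC ID with a TDVMCALL RDMSR of MSR 0x802
        // (IA32_X2APIC_APICID): rax = 0 (TDG.VP.VMCALL), rcx = 0x1c00 exposes
        // r10-r12, r10 = 0 (standard leaf), r11 = 0x1f (RDMSR), r12 = 0x802.
        // The MSR value is returned in r11.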
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xC0]);

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xB9, 0x00, 0x1C, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4D, 0x31, 0xD2]);

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBB, 0x1F, 0x00, 0x00, 0x00],
        );

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBC, 0x02, 0x08, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

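        // Spin until the mailbox names this AP's APIC ID.
        // mov eax, [rel mailbox_apic_id]; cmp r11d, eax; jnz <spin>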
        let mailbox_spinloop = byte_offset;
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_apic_id) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x41, 0x39, 0xC3]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] =
            (mailbox_spinloop.wrapping_sub(byte_offset)) as u8;

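        // Then wait for the wakeup command (1) to appear in mailbox_command.
        // xor ebx, ebx; mov ebx, [rel mailbox_command]; mov edx, 1; cmp ebx, edx; jnz <spin>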
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xDB]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov edx, 1 -- the expected wakeup command value, encoded with a full
        // imm32 so that the following cmp is not consumed as immediate bytes.
        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xBA, 0x01, 0x00, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x39, 0xD3]);

        // jnz mailbox_spinloop -- keep polling until the wakeup command is
        // observed (loop target assumed to be the top of the mailbox spin loop).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] =
            (mailbox_spinloop.wrapping_sub(byte_offset)) as u8;

        // The BSP's jz lands here, past the mailbox wait.
        reset_page[skip_mailbox_for_bsp.wrapping_sub(1)] =
            (byte_offset.wrapping_sub(skip_mailbox_for_bsp)) as u8;

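        // If configured, have the BSP accept the low 1MB of memory, one 4KB
        // page at a time, using TDCALL leaf 6 (TDG.MEM.PAGE.ACCEPT). A failed
        // accept jumps to the int3 filler just below the reset vector.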
        if self.accept_lower_1mb {
            // Only the BSP (VP index 0) performs the acceptance.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            // Accept pages from GPA 0 up to 1MB (0x100000).
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

            let jump_offset = byte_offset;

            // mov eax, 6; tdcall; test rax, rax
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            // jnz <0xFEF> -- on failure, jump to the int3 filler just before
            // the reset vector.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            let relative_offset = (0xFEF - byte_offset) as u32;
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            // add ecx, 0x1000; cmp ecx, edi; jb <loop> -- advance to the next
            // 4KB page until 1MB has been accepted.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

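        // APs only: acknowledge the wakeup by clearing mailbox_command and
        // redirect the start address to the OS-supplied wakeup vector.
        // test esi, esi; jz <skip for BSP>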
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l7_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x31, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_wakeup_vector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[l7_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l7_offset)) as u8;

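        // Load the remaining register state (rsp, rbp, rsi, r8-r11) from the
        // context and pass the VP index to the target in ecx (mov ecx, esi).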
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

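        // If a code selector was supplied, use jmp far [rel initial_rip] to
        // load both the 64-bit target and the new CS selector from memory;
        // otherwise do an indirect near jump through initial_rip.
        // test ax, ax; jz <near jump>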
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

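        // Publish the reset page as a single exclusive page at GPA 0xFFFFF000.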
        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}