use super::VpContextBuilder;
use super::VpContextState;
use crate::vp_context_builder::VpContextPageState;
use igvm_defs::PAGE_SIZE_4K;
use loader::importer::SegmentRegister;
use loader::importer::X86Register;
use loader_defs::shim::TdxTrampolineContext;
use std::mem::offset_of;
use x86defs::X64_EFER_LME;
use x86defs::X86X_MSR_DEFAULT_PAT;
use zerocopy::IntoBytes;

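/// A hardware VP context builder for TDX. Imported register state is recorded
/// in a [`TdxTrampolineContext`], and `finalize` emits a trampoline on the
/// architectural reset page that applies that state at VP start.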
#[derive(Debug)]
pub struct TdxHardwareContext {
    trampoline_context: TdxTrampolineContext,
    accept_lower_1mb: bool,
}

impl TdxHardwareContext {
    pub fn new(accept_lower_1mb: bool) -> Self {
        Self {
            trampoline_context: TdxTrampolineContext::default(),
            accept_lower_1mb,
        }
    }
}

impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    fn import_vp_register(&mut self, register: X86Register) {
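        // All data segment selectors (ds, es, fs, gs, ss) must be identical,
        // as the trampoline context stores only a single data selector.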
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
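            // CR3 is loaded twice by the trampoline: a 32-bit value before the
            // switch to long mode, then the full 64-bit value afterwards.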
            X86Register::Cr3(cr3) => {
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
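                // The trampoline does not load EFER; TDX requires long mode,
                // so LME must already be set.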
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
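                // The trampoline does not program the PAT; only the
                // architectural default is supported.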
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    fn set_vp_context_memory(&mut self, _page_base: u64) {
        unimplemented!("not supported for TDX");
    }

    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
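        // Construct a static GDT with a single 64-bit code descriptor at
        // selector 0x08, used for the far jump into long mode.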
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;

        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;

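        // The architectural reset vector is the last 16 bytes of the page, so
        // the initial jump to the trampoline is placed at offset 0xFF0.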
        let mut byte_offset = 0xFF0;

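        // Fill the reset page with int3 (0xCC) so that stray execution traps.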
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

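        // Place the trampoline context at the start of the page.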
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };

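        // jmp rel32 from the reset vector to the trampoline code that follows
        // the context.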
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = trampoline_context.len();

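        // The trampoline code starts immediately after the context and
        // initially executes in 32-bit protected mode.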
        let l0_offset = byte_offset;

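        // lgdt [static_gdt_limit], using a 32-bit absolute address.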
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

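        // mov eax, [cr4]; mov cr4, eax -- load CR4 from the context.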
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

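        // mov eax, [transition_cr3]; mov cr3, eax -- load the 32-bit
        // transition CR3.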
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

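        // mov eax, [cr0]; mov cr0, eax -- load CR0 from the context, enabling
        // paging and activating long mode.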
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

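        // Far jump to the next instruction with the static GDT's 64-bit code
        // selector (0x08) to enter long mode.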
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);

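        // mov rax, [rip + cr3]; mov cr3, rax -- reload the full 64-bit CR3.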
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

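        // If a GDT was supplied (nonzero gdtr_limit), load it with lgdt;
        // otherwise skip past the descriptor table and selector loads.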
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

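        // Likewise, load the IDT if an IDT limit was supplied.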
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

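        // Load ss, ds, es, fs, and gs with the data selector.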
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

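        // Load the task register if a task selector was supplied.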
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

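        // APs (nonzero VP index in esi) wait for a mailbox wakeup; the BSP
        // skips past the mailbox code.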
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let skip_mailbox_for_bsp = byte_offset;

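        // TDG.VP.VMCALL<Instruction.RDMSR> of MSR 0x802 (IA32_X2APIC_APICID):
        // rax = 0 (VMCALL), rcx = 0x1c00 (pass r10-r12), r10 = 0, r11 = 31
        // (RDMSR), r12 = 0x802.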
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xC0]);

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xB9, 0x00, 0x1C, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4D, 0x31, 0xD2]);

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBB, 0x1F, 0x00, 0x00, 0x00],
        );

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBC, 0x02, 0x08, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

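        // Spin until the mailbox APIC ID matches this AP's APIC ID (returned
        // by the RDMSR tdcall in r11).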
        let mailbox_spinloop = byte_offset;
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_apic_id) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x41, 0x39, 0xC3]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] =
            (mailbox_spinloop.wrapping_sub(byte_offset)) as u8;

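        // Read the mailbox command and compare it with 1 (wakeup).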
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xDB]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xBA, 0x01, 0x00, 0x00, 0x00],
        );

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x39, 0xD3]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;

        reset_page[skip_mailbox_for_bsp.wrapping_sub(1)] =
            (byte_offset.wrapping_sub(skip_mailbox_for_bsp)) as u8;

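        // Optionally accept the low 1MB of memory on the BSP; APs skip this.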
        if self.accept_lower_1mb {
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

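            // Accept loop: tdcall leaf 6 (TDG.MEM.PAGE.ACCEPT) with the GPA in
            // rcx, advancing 4K at a time up to 1MB. On failure, jump to the
            // int3 filler at page offset 0xFEF.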
            let jump_offset = byte_offset;

            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            let relative_offset = 0xFEF - byte_offset;
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

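        // APs clear the mailbox command and replace initial_rip with the
        // mailbox wakeup vector; the BSP skips this.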
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l7_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x31, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_wakeup_vector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[l7_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l7_offset)) as u8;

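        // Load rsp, rbp, rsi, and r8-r11 from the context; ecx receives the VP
        // index from esi.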
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

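        // If a code selector was supplied, perform a far indirect jump through
        // initial_rip to reload cs; otherwise do a near indirect jump.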
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}