1use super::VpContextBuilder;
7use super::VpContextState;
8use crate::vp_context_builder::VpContextPageState;
9use igvm_defs::PAGE_SIZE_4K;
10use loader::importer::SegmentRegister;
11use loader::importer::X86Register;
12use std::mem::offset_of;
13use x86defs::X64_EFER_LME;
14use x86defs::X86X_MSR_DEFAULT_PAT;
15use zerocopy::Immutable;
16use zerocopy::IntoBytes;
17use zerocopy::KnownLayout;
18
/// In-memory layout of the TDX reset-page trampoline context.
///
/// This struct is copied verbatim to the start of the reset page and is
/// addressed *by byte offset* (via `offset_of!`) from the hand-encoded
/// trampoline machine code emitted in `finalize`, so it must be `repr(C)`:
/// field order, sizes, and padding are part of the ABI with that code.
///
/// Several adjacent field pairs deliberately form in-memory descriptor
/// images consumed directly by instructions:
/// - `static_gdt_limit` + `static_gdt_base`: 32-bit LGDT operand (m16&32).
/// - `idtr_limit` + `idtr_base`: 64-bit LIDT operand (m16&64).
/// - `gdtr_limit` + `gdtr_base`: 64-bit LGDT operand (m16&64);
///   `padding_2` keeps the pair contiguous and aligned.
/// - `initial_rip` + `code_selector`: an m16:64 far pointer used by the
///   trampoline's final `jmp far [initial_rip]`.
#[repr(C)]
#[derive(Debug, Default, Clone, Copy, IntoBytes, Immutable, KnownLayout)]
pub struct TdxTrampolineContext {
    // Gate spun on by each VP at reset; compared against esi — presumably the
    // VP index from the TDX initial register state. TODO(review): confirm
    // against the TDX module initial-state spec.
    start_gate: u32,

    // Single selector loaded into ss/ds/es/fs/gs (all must match).
    data_selector: u16,
    // Limit:base image for the temporary GDT used to enter long mode.
    static_gdt_limit: u16,
    static_gdt_base: u32,

    task_selector: u16,
    // Limit:base image for the guest IDT (loaded only if limit != 0).
    idtr_limit: u16,
    idtr_base: u64,

    // initial_rip followed by code_selector forms the m16:64 far pointer for
    // the final transfer into the guest; a zero code_selector selects a near
    // indirect jump instead.
    initial_rip: u64,
    code_selector: u16,
    padding_2: [u16; 2],
    // Limit:base image for the guest GDT (loaded only if limit != 0).
    gdtr_limit: u16,
    gdtr_base: u64,

    // Initial GPR values applied by the trampoline before handing off.
    rsp: u64,
    rbp: u64,
    rsi: u64,
    r8: u64,
    r9: u64,
    r10: u64,
    r11: u64,
    cr0: u64,
    cr3: u64,
    cr4: u64,
    // CR3 used while still in 32-bit mode, hence only 32 bits wide.
    transition_cr3: u32,
    padding_3: u32,

    // Backing storage for the temporary GDT (null descriptor + one 64-bit
    // code descriptor at selector 0x08); populated in `finalize`.
    static_gdt: [u8; 16],
}
59
/// Builder that captures initial VP state for a TDX guest and renders it into
/// a reset-page trampoline, since TDX does not let the loader set arbitrary
/// VP register state directly.
#[derive(Debug)]
pub struct TdxHardwareContext {
    // Register values accumulated via `import_vp_register`, consumed by
    // `finalize`.
    trampoline_context: TdxTrampolineContext,
    // When true, the generated trampoline TDCALLs TDG.MEM.PAGE.ACCEPT over
    // the low 1MB before transferring control.
    accept_lower_1mb: bool,
}
67
68impl TdxHardwareContext {
69 pub fn new(accept_lower_1mb: bool) -> Self {
70 Self {
71 trampoline_context: TdxTrampolineContext::default(),
72 accept_lower_1mb,
73 }
74 }
75}
76
impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    /// Capture an initial register value into the trampoline context.
    ///
    /// TDX does not allow the loader to set VP state directly; supported
    /// registers are stashed in `trampoline_context` and applied by the
    /// generated trampoline. Registers the trampoline cannot honor panic;
    /// MTRRs are ignored with a warning.
    fn import_vp_register(&mut self, register: X86Register) {
        // All data segment registers (ss/ds/es/fs/gs) are loaded from the
        // single `data_selector` field, so they must all agree.
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
            X86Register::Cr3(cr3) => {
                // The transition CR3 is loaded while still executing 32-bit
                // code, so the paging structures must live below 4GB.
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
                // EFER is fixed by the TDX module; only validate that the
                // requested value is compatible (long mode enabled).
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
                // PAT cannot be set by the trampoline; require the default.
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            // MTRRs are not settable under TDX; ignore rather than fail so
            // callers sharing register lists with other isolation types work.
            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    /// TDX VP context always lives in the architectural reset page; a caller
    /// cannot choose its location.
    fn set_vp_context_memory(&mut self, _page_base: u64) {
        unimplemented!("not supported for TDX");
    }

    /// Render the captured state into a 4K reset page containing the
    /// trampoline context followed by hand-assembled trampoline code, and
    /// push it as a `VpContextState::Page` at GPA 0xFFFFF000.
    ///
    /// The trampoline: spins on `start_gate`, enters long mode via a
    /// temporary GDT and transition CR3, optionally loads the guest
    /// GDT/IDT/segments/TR, optionally accepts the low 1MB, loads the
    /// initial GPRs, and jumps to `initial_rip`.
    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
        // Build the temporary GDT: a null descriptor plus one 64-bit code
        // descriptor at selector 0x08 (limit 0xFFFF, base 0, access 0x9B
        // = present/code/accessed, flags 0xA0 = G|L i.e. long mode).
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;

        // Two descriptors = 16 bytes; limit is size - 1. The base is the
        // linear address of `static_gdt` once the page is mapped at
        // 0xFFFFF000.
        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;

        // The x86 reset vector is 16 bytes below 4GB, i.e. offset 0xFF0 in
        // this page.
        let mut byte_offset = 0xFF0;

        // Fill with int3 (0xCC) so any stray execution traps immediately.
        // The TDCALL failure path below deliberately jumps to one of these
        // at offset 0xFEF.
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

        // The trampoline context itself occupies the start of the page.
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        // Helper: write raw instruction bytes at `byte_offset`, returning
        // the offset just past them.
        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };

        // At the reset vector: jmp rel32 back to the trampoline code that
        // starts immediately after the context structure.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Continue emitting at the start of the trampoline code proper.
        byte_offset = trampoline_context.len();

        // l0: spin until [start_gate] == esi. NOTE(review): esi is presumed
        // to hold the VP index from the TDX initial register state — confirm.
        let l0_offset = byte_offset;

        // cmp esi, dword ptr [start_gate]  (32-bit absolute disp32)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0x35]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, start_gate) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // jne l0 (rel8 computed directly, backward jump)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        let jne_l0_offset = (l0_offset.wrapping_sub(byte_offset + 1)) as u8;
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[jne_l0_offset]);

        // lgdt [static_gdt_limit] — load the temporary GDT (limit:base image
        // formed by the adjacent static_gdt_limit/static_gdt_base fields).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov eax, [cr4]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr4, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

        // mov eax, [transition_cr3] — 32-bit paging root for the switch.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov eax, [cr0]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr0, eax — with paging enabled (and EFER.LME fixed on by TDX)
        // this activates long mode.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

        // jmp far 0x08:<next instruction> — far jump through the temporary
        // GDT's 64-bit code descriptor to enter 64-bit mode. The ptr16:32
        // operand is 4 bytes of target address followed by the selector.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);

        // Now in 64-bit mode; memory operands below are RIP-relative
        // (disp32 = field offset - offset of next instruction, since the
        // page base cancels out).
        //
        // mov rax, [cr3] — load the final, full 64-bit CR3.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov ax, [gdtr_limit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax — skip guest GDT/IDT/segment/TR loads if no GDT given.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz l4 (rel8 placeholder, patched after the LTR section below)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;

        // lgdt [gdtr_limit] — load the guest GDT (m16&64 image).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // NOTE(review): this writes the byte at trampoline_context.len()-1,
        // i.e. the LAST byte of `static_gdt` (base[31:24] of the code
        // descriptor — ignored for a long-mode CS, so likely harmless). It
        // has the shape of a jump rel8 patch, but no forward jump targets
        // this label; verify whether `l0_offset` was intended here.
        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

        // mov ax, [idtr_limit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax — skip IDT load if no IDT given.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <past lidt> (rel8 placeholder, patched below)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // lidt [idtr_limit] — load the guest IDT (m16&64 image).
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Patch the jz rel8 to land here.
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // mov ax, [data_selector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ss, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        // mov ds, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        // mov es, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        // mov fs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        // mov gs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

        // mov ax, [task_selector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax — skip LTR if no task selector given.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <past ltr> (rel8 placeholder, patched below)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // ltr ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        // Patch the jz rel8 to land here.
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // l4: patch the "no GDT" jz to land here, past all descriptor loads.
        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

        if self.accept_lower_1mb {
            // Only one VP should accept memory: test esi, esi / jne l3 skips
            // the loop when esi != 0 (presumably non-zero VP index — confirm).
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            // jne l3 (rel8 placeholder, patched at the end of the block)
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            // xor ecx, ecx — rcx = GPA to accept, starting at 0.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            // xor edx, edx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            // mov edi, 0x100000 — loop bound: 1MB.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

            // Accept-loop head.
            let jump_offset = byte_offset;

            // mov eax, 6 — TDCALL leaf 6: TDG.MEM.PAGE.ACCEPT.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            // tdcall
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

            // test rax, rax — nonzero status means the accept failed.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            // jne rel32 to offset 0xFEF — an int3 (page fill byte), so a
            // failed accept traps instead of continuing.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            // NOTE(review): `relative_offset` is usize here, so `as_bytes()`
            // writes 8 bytes for a 4-byte rel32; the upper 4 bytes are
            // immediately overwritten by the next instruction, so the result
            // is correct, but a u32 would be more precise — verify.
            let relative_offset = 0xFEF - byte_offset;
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            // add ecx, 0x1000 — advance to the next 4K page.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            // cmp ecx, edi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            // jb <loop head> (backward rel8, patched in place)
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            // l3: patch the "skip accept" jne to land here.
            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

        // Load the initial GPRs from the context (all RIP-relative).
        //
        // mov rsp, [rsp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rbp, [rbp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ecx, esi — presumably passes the VP index along in ecx before
        // esi is overwritten below; confirm against the guest entry ABI.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        // mov rsi, [rsi]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r8, [r8]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r9, [r9]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r10, [r10]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r11, [r11]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ax, [code_selector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax — choose far vs near transfer to the guest.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz <near jump> (rel8 placeholder, patched below)
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // jmp far [initial_rip] — REX.W + FF /5: the m16:64 operand is the
        // adjacent initial_rip (8 bytes) + code_selector (2 bytes) fields.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Patch the jz rel8 to land on the near jump below.
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // jmp [initial_rip] — REX.W + FF /4: near indirect jump when no code
        // selector was supplied.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Emit the finished reset page at page number 0xFFFFF, i.e. GPA
        // 0xFFFFF000, covering the architectural reset vector.
        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}