igvmfilegen/vp_context_builder/tdx.rs

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

//! TDX VP context builder.
use super::VpContextBuilder;
use super::VpContextState;
use crate::vp_context_builder::VpContextPageState;
use igvm_defs::PAGE_SIZE_4K;
use loader::importer::SegmentRegister;
use loader::importer::X86Register;
use loader_defs::shim::TdxTrampolineContext;
use std::mem::offset_of;
use x86defs::X64_EFER_LME;
use x86defs::X86X_MSR_DEFAULT_PAT;
use zerocopy::IntoBytes;

/// Represents a hardware context for TDX. This contains both the initial
/// register set and the registers set by the trampoline code.
#[derive(Debug)]
pub struct TdxHardwareContext {
    trampoline_context: TdxTrampolineContext,
    accept_lower_1mb: bool,
}

impl TdxHardwareContext {
    pub fn new(accept_lower_1mb: bool) -> Self {
        Self {
            trampoline_context: TdxTrampolineContext::default(),
            accept_lower_1mb,
        }
    }
}

impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    /// Import a register into the hardware context. Only a subset of
    /// registers is allowed.
    fn import_vp_register(&mut self, register: X86Register) {
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
            X86Register::Cr3(cr3) => {
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
                // TDX guests are not permitted to set EFER explicitly.  Verify
                // that the requested EFER value is compatible with the
                // architecturally imposed value.
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    fn set_vp_context_memory(&mut self, _page_base: u64) {
        unimplemented!("not supported for TDX");
    }

    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
        // Construct and load an initial temporary GDT to use for the transition
        // to long mode.  A single selector (0008:) is defined as a 64-bit code
        // segment.
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;
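        // Bytes 0x08..0x10 form GDT descriptor 0008: limit[15:0] = 0xFFFF
        // (bytes 0x08-0x09), base = 0, access byte 0x9B (present, DPL 0,
        // executable, readable, accessed) at byte 0x0D, and flags 0xA0
        // (G = 1, L = 1) at byte 0x0E, i.e. a 64-bit code segment.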

        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;
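        // The reset page is placed at GPA 0xFFFFF000 (see the push at the
        // end of this function), so the physical address of any context
        // field is 0xFFFFF000 plus its offset within TdxTrampolineContext.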

        // Generate a 32-bit assembly trampoline to enable long mode and transfer
        // to the specified context.
        let mut byte_offset = 0xFF0;
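        // 0xFF0 is the offset of the architectural reset vector
        // (0xFFFFFFF0) within the page; the VP begins execution there.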

        // Fill the reset page with INT 3 as a standard code fill value.
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

        // Copy trampoline_context to the start of the reset page.
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };
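        // copy_instr appends raw instruction bytes and returns the advanced
        // offset.  The 32-bit operands written below are either absolute
        // addresses (0xFFFFF000 + field offset, used before the switch to
        // long mode) or RIP-relative displacements computed as
        // target - (byte_offset + 4), where byte_offset + 4 is the address
        // of the byte following the 4-byte field.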

        // jmp InitialCode
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        byte_offset = trampoline_context.len();

        // L0:
        let l0_offset = byte_offset;

        // lgdt [staticGdt]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Load the control registers.  CR0 must be last so long mode is properly
        // enabled (the architecture sets LME prior to initial entry), and the CR0
        // load must be followed by a far jump to complete long mode
        // configuration.

        // mov eax, [initialCr4]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr4, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

        // mov eax, [transitionCr3]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov eax, [initialCr0]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr0, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

        // jmp far L2
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);
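        // The far jump's operand is the absolute address of L2 (the six
        // bytes following the EA opcode hold a 4-byte offset and the 2-byte
        // selector 0008:), so it reloads CS from the static GDT and lands
        // in 64-bit mode.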

        // L2:

        // Load the 64-bit CR3 now that long mode is active.

        // mov rax, [initialCr3]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // Load descriptor tables and selectors, except CS which will be loaded in
        // the final jump.  If no GDT is specified, then skip loading all
        // selectors.

        // mov ax, [initialGdtrLimit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz L4
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;
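        // The rel8 displacement byte is left as fill for now and patched
        // with (label - l4_offset) once L4's offset is known; the same
        // placeholder-and-patch pattern is used for every short jump below.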

        // lgdt [initialGdtr]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // @@:
        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

        // mov ax, [initialIdtrLimit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // lidt [initialIdtr]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // mov ax, [dataSelector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ss, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        // mov ds, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        // mov es, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        // mov fs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        // mov gs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

        // mov ax, [taskSelector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // ltr ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // L4:
        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

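        // esi holds the VP index (part of the TDX-defined initial register
        // state), so a nonzero value identifies an AP.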
        // test esi, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // Skip the mailbox spinloop if we are on the BSP.

        // jz skip_mailbox_for_bsp
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let skip_mailbox_for_bsp = byte_offset;

        // Read the APIC ID of this AP with a TDG.VP.VMCALL hypercall.

        // xor eax, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xC0]);

        // mov ecx, 01c00h
        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xB9, 0x00, 0x1C, 0x00, 0x00],
        );

        // xor r10, r10
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4D, 0x31, 0xD2]);

        // mov r11d, 01fh
        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBB, 0x1F, 0x00, 0x00, 0x00],
        );

        // mov r12d, 0802h
        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0x41, 0xBC, 0x02, 0x08, 0x00, 0x00],
        );

        // tdcall
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);
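        // Per the TDX GHCI calling convention: eax = 0 selects
        // TDG.VP.VMCALL, ecx = 1c00h exposes r10-r12 to the host, r10 = 0
        // selects the standard VMCALL namespace, r11 = 1fh is
        // Instruction.RDMSR, and r12 = 802h is the MSR index
        // (IA32_X2APIC_APICID).  On success r11 holds the MSR value, i.e.
        // this AP's APIC ID.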

        // Spin until the kernel requests this AP to continue via the mailbox.

        let mailbox_spinloop = byte_offset;
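        // This follows the ACPI MP wakeup mailbox pattern: the OS writes a
        // target APIC ID and a wakeup command, and each AP spins until both
        // match its own.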
        // mov eax, [mailbox_apic_id]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_apic_id) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // cmp r11d, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x41, 0x39, 0xC3]);

        // jne mailbox_spinloop
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] =
            (mailbox_spinloop.wrapping_sub(byte_offset)) as u8;

        // xor ebx, ebx
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xDB]);

        // mov ebx, [mailbox_command]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov edx, 01h
        byte_offset = copy_instr(
            &mut reset_page,
            byte_offset,
            &[0xBA, 0x01, 0x00, 0x00, 0x00],
        );

        // cmp ebx, edx
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x39, 0xD3]);

        // jne mailbox_spinloop
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] =
            (mailbox_spinloop.wrapping_sub(byte_offset)) as u8;

        // skip_mailbox_for_bsp:
        reset_page[skip_mailbox_for_bsp.wrapping_sub(1)] =
            (byte_offset.wrapping_sub(skip_mailbox_for_bsp)) as u8;

        // Execute TDG.MEM.PAGE.ACCEPT to accept the low 1 MB of the address
        // space.  This is only required if the start context is in VTL 0, and
        // only on the BSP.
        if self.accept_lower_1mb {
            // test esi, esi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            // jnz L3
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            // xor ecx, ecx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            // xor edx, edx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            // mov edi, 0100000h
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

            // L1:
            let jump_offset = byte_offset;

            // mov eax, 06h
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            // tdcall
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);
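            // TDG.MEM.PAGE.ACCEPT is TDCALL leaf 6.  rcx holds the GPA of
            // the page to accept (its low bits encode the mapping level,
            // 0 = 4K), so this loop accepts GPAs 0 through 0xFF000 in 4K
            // steps until ecx reaches edi (1 MB).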

            // test rax, rax
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            // jne BreakPoint
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            // A nonzero status branches to offset 0xFEF, which lies in the
            // INT 3 fill just before the reset vector.
            relative_offset = 0xFEFu32.wrapping_sub(byte_offset as u32);
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            // add ecx, 01000h
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            // cmp ecx, edi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            // jb L1
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            // L3:
            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

        // Load entry register state and transfer to the image.

        // test esi, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // jz L7
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l7_offset = byte_offset;
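        // APs only: acknowledge the wakeup by clearing mailbox_command,
        // then redirect the entry point to the OS-supplied wakeup vector.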

        // xor rax, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x31, 0xC0]);

        // mov [mailbox_command], ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rax, [mailbox_wakeup_vector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_wakeup_vector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov [initialRip], rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // L7:
        reset_page[l7_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l7_offset)) as u8;

        // mov rsp, [initialRsp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rbp, [initialRbp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ecx, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        // mov rsi, [initialRsi]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r8, [initialR8]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r9, [initialR9]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r10, [initialR10]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r11, [initialR11]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ax, [initialCs]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // jmp far [initialRip]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
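        // The far form loads a 10-byte m16:64 operand: the 8-byte RIP at
        // initial_rip followed by a 2-byte CS, which relies on
        // code_selector directly following initial_rip in
        // TdxTrampolineContext.  With no CS imported, the near indirect
        // jump below is used instead.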

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // jmp [initialRip]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Add this data to the architectural reset page.
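        // page_base is in units of 4K pages: 0xFFFFF puts the page at GPA
        // 0xFFFFF000, the architectural reset page, with the reset vector
        // at 0xFFFFFFF0.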
        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}