igvmfilegen/vp_context_builder/tdx.rs

1// Copyright (c) Microsoft Corporation.
2// Licensed under the MIT License.
3
4//! TDX VP context builder.
5
6use super::VpContextBuilder;
7use super::VpContextState;
8use crate::vp_context_builder::VpContextPageState;
9use igvm_defs::PAGE_SIZE_4K;
10use loader::importer::SegmentRegister;
11use loader::importer::X86Register;
12use loader_defs::shim::TdxTrampolineContext;
13use std::mem::offset_of;
14use x86defs::X64_EFER_LME;
15use x86defs::X86X_MSR_DEFAULT_PAT;
16use zerocopy::IntoBytes;
17
/// Represents a hardware context for TDX. This contains both the sets of
/// initial registers and registers set by the trampoline code.
#[derive(Debug)]
pub struct TdxHardwareContext {
    // Register state consumed by the 32-bit trampoline stub; serialized
    // verbatim to the start of the reset page by `finalize`.
    trampoline_context: TdxTrampolineContext,
    // When true, `finalize` emits a TDG.MEM.PAGE.ACCEPT loop on the BSP to
    // accept the low 1 MB of the address space before entering the image.
    accept_lower_1mb: bool,
}
25
26impl TdxHardwareContext {
27    pub fn new(accept_lower_1mb: bool) -> Self {
28        Self {
29            trampoline_context: TdxTrampolineContext::default(),
30            accept_lower_1mb,
31        }
32    }
33}
34
impl VpContextBuilder for TdxHardwareContext {
    type Register = X86Register;

    /// Import a register into the hardware context. Only a subset of registers
    /// are allowed.
    ///
    /// # Panics
    ///
    /// Panics on registers TDX forbids (r12, rflags, an EFER without LME, a
    /// non-default PAT), on a CR3 that does not fit in 32 bits, or when data
    /// segment selectors disagree with one another. MTRR registers are ignored
    /// with a warning rather than rejected.
    fn import_vp_register(&mut self, register: X86Register) {
        // All data segment registers (DS/ES/FS/GS/SS) must share a single
        // selector; record the first one seen and reject later mismatches.
        let mut set_data_selector = |reg: SegmentRegister| {
            if self.trampoline_context.data_selector == 0 {
                self.trampoline_context.data_selector = reg.selector;
            } else if self.trampoline_context.data_selector != reg.selector {
                panic!("data selectors must be the same");
            }
        };

        match register {
            X86Register::Gdtr(reg) => {
                self.trampoline_context.gdtr_base = reg.base;
                self.trampoline_context.gdtr_limit = reg.limit;
            }
            X86Register::Idtr(reg) => {
                self.trampoline_context.idtr_base = reg.base;
                self.trampoline_context.idtr_limit = reg.limit;
            }
            X86Register::Ds(reg)
            | X86Register::Es(reg)
            | X86Register::Fs(reg)
            | X86Register::Gs(reg)
            | X86Register::Ss(reg) => set_data_selector(reg),
            X86Register::Cs(reg) => self.trampoline_context.code_selector = reg.selector,
            X86Register::Tr(reg) => {
                self.trampoline_context.task_selector = reg.selector;
            }
            X86Register::Cr0(cr0) => self.trampoline_context.cr0 = cr0,
            X86Register::Cr3(cr3) => {
                // The trampoline loads CR3 twice: a 32-bit value before long
                // mode is active (transition_cr3) and the full 64-bit value
                // afterwards, so the page table root must fit in 32 bits.
                let cr3_u32: u32 = cr3.try_into().expect("cr3 must fit in u32");
                self.trampoline_context.transition_cr3 = cr3_u32;
                self.trampoline_context.cr3 = cr3;
            }
            X86Register::Cr4(cr4) => self.trampoline_context.cr4 = cr4,
            X86Register::Efer(efer) => {
                // TDX guests are not permitted to set EFER explicitly.  Verify
                // that the requested EFER value is compatible with the
                // architecturally imposed value.
                if efer & X64_EFER_LME == 0 {
                    panic!("EFER LME must be set for tdx")
                }
            }
            X86Register::Pat(pat) => {
                if pat != X86X_MSR_DEFAULT_PAT {
                    panic!("PAT must be default for tdx")
                }
            }
            X86Register::Rbp(rbp) => self.trampoline_context.rbp = rbp,
            X86Register::Rip(rip) => self.trampoline_context.initial_rip = rip,
            X86Register::Rsi(rsi) => self.trampoline_context.rsi = rsi,
            X86Register::Rsp(rsp) => self.trampoline_context.rsp = rsp,
            X86Register::R8(r8) => self.trampoline_context.r8 = r8,
            X86Register::R9(r9) => self.trampoline_context.r9 = r9,
            X86Register::R10(r10) => self.trampoline_context.r10 = r10,
            X86Register::R11(r11) => self.trampoline_context.r11 = r11,
            X86Register::R12(_) => panic!("r12 not allowed for tdx"),
            X86Register::Rflags(_) => panic!("rflags not allowed for tdx"),

            X86Register::MtrrDefType(_)
            | X86Register::MtrrPhysBase0(_)
            | X86Register::MtrrPhysMask0(_)
            | X86Register::MtrrPhysBase1(_)
            | X86Register::MtrrPhysMask1(_)
            | X86Register::MtrrPhysBase2(_)
            | X86Register::MtrrPhysMask2(_)
            | X86Register::MtrrPhysBase3(_)
            | X86Register::MtrrPhysMask3(_)
            | X86Register::MtrrPhysBase4(_)
            | X86Register::MtrrPhysMask4(_)
            | X86Register::MtrrFix64k00000(_)
            | X86Register::MtrrFix16k80000(_)
            | X86Register::MtrrFix4kE0000(_)
            | X86Register::MtrrFix4kE8000(_)
            | X86Register::MtrrFix4kF0000(_)
            | X86Register::MtrrFix4kF8000(_) => {
                tracing::warn!(?register, "Ignoring MTRR register for TDX.")
            }
        }
    }

    fn set_vp_context_memory(&mut self, _page_base: u64) {
        // The TDX startup context always lives in the architectural reset
        // page (see `finalize`); a caller-chosen page base has no meaning.
        unimplemented!("not supported for TDX");
    }

    /// Build the architectural reset page and push it into `state`.
    ///
    /// Layout of the emitted 4 KB page (placed at GPA page 0xFFFFF, i.e. the
    /// top of the 32-bit address space):
    /// - offset 0: the serialized `TdxTrampolineContext`,
    /// - immediately after it: hand-assembled code that (on APs) spins on the
    ///   mailbox, enables long mode, optionally accepts the low 1 MB on the
    ///   BSP, loads the imported register state, and jumps to the entry point,
    /// - offset 0xFF0 (the x86 reset vector, physical 0xFFFFFFF0): a `jmp`
    ///   into that code. The rest of the page is INT 3 fill.
    fn finalize(&mut self, state: &mut Vec<VpContextState>) {
        // Construct and load an initial temporary GDT to use for the transition
        // to long mode.  A single selector (0008:) is defined as a 64-bit code
        // segment.
        //
        // Bytes 0x08..0x0F form the descriptor for selector 0x08:
        // limit = 0xFFFF (0x08-0x09), access = 0x9B (present, code, readable,
        // accessed; 0x0D), flags = 0xA0 (granularity + L bit; 0x0E). The
        // base bytes stay zero.
        self.trampoline_context.static_gdt[0x08] = 0xFF;
        self.trampoline_context.static_gdt[0x09] = 0xFF;
        self.trampoline_context.static_gdt[0x0D] = 0x9B;
        self.trampoline_context.static_gdt[0x0E] = 0xA0;

        // The GDTR base is a flat address: the reset page occupies the 4 KB
        // at 0xFFFFF000, with the trampoline context at its start.
        self.trampoline_context.static_gdt_limit = 0xF;
        self.trampoline_context.static_gdt_base =
            0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt) as u32;

        // Generate a 32-bit assembly trampoline to enable long mode and transfer
        // to the specified context.
        //
        // 0xFF0 is the offset of the architectural reset vector within the
        // page; emission starts there with the initial jump.
        let mut byte_offset = 0xFF0;

        // Fill the reset page with INT 3 as a standard code fill value.
        let mut reset_page = vec![0xCCu8; PAGE_SIZE_4K as usize];

        // Copy trampoline_context to the start of the reset page.
        let trampoline_context = self.trampoline_context.as_bytes();
        reset_page[0..trampoline_context.len()].copy_from_slice(trampoline_context);

        // Helper: copy an instruction/operand byte sequence into the page at
        // `byte_offset` and return the offset just past it.
        let copy_instr =
            |trampoline_page: &mut Vec<u8>, byte_offset, instruction: &[u8]| -> usize {
                trampoline_page[byte_offset..byte_offset + instruction.len()]
                    .copy_from_slice(instruction);
                byte_offset + instruction.len()
            };

        // jmp InitialCode
        //
        // E9 rel32: rel32 is relative to the end of the instruction
        // (byte_offset + 4), wrapping backwards to the code that begins right
        // after the serialized trampoline context.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xE9]);
        let mut relative_offset =
            (trampoline_context.len() as u32).wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Emission now continues at the jump target, just after the context.
        byte_offset = trampoline_context.len();

        // L0:
        //
        // NOTE(review): l0_offset is recorded here but its only use is the
        // "@@:" patch after the lgdt below, which writes the byte at
        // l0_offset - 1 -- the *last byte of the copied trampoline context*,
        // not a reserved rel8 slot. Confirm that patch target is intentional.
        let l0_offset = byte_offset;

        // esi carries the APIC id; zero means BSP, which skips the mailbox
        // wait loop entirely.
        // test esi, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // jz mailbox_end
        //
        // 0x74 rel8; one byte is reserved for the displacement and patched
        // at "mailbox_end:" below.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let mailbox_end = byte_offset;
        let mailbox_begin = byte_offset;

        // AP mailbox wait loop: spin until mailbox_command == 1 and
        // mailbox_apic_id matches this AP's id in esi.

        // xor eax, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x31, 0xC0]);

        // mov ax, [mailbox_command]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8b, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, mailbox_command) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov dx, 01h
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0xba, 0x01, 0x00]);

        // cmp ax, dx
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x39, 0xd0]);

        // jne mailbox_begin
        //
        // Reserve one rel8 byte, then patch it with the (negative) distance
        // back to mailbox_begin.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] = (mailbox_begin.wrapping_sub(byte_offset)) as u8;

        // cmp esi, [mailbox_apic_id]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3b, 0x35]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, mailbox_apic_id) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // jne mailbox_begin
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
        byte_offset += 1;
        reset_page[byte_offset.wrapping_sub(1)] = (mailbox_begin.wrapping_sub(byte_offset)) as u8;

        // mailbox_end:
        //
        // Patch the "jz mailbox_end" rel8 reserved above with the forward
        // distance to here.
        reset_page[mailbox_end.wrapping_sub(1)] = (byte_offset.wrapping_sub(mailbox_end)) as u8;

        // lgdt, [staticGdt]
        //
        // The operand addresses the pseudo-descriptor (limit + base) in the
        // trampoline context via its flat 0xFFFFF000-based address.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, static_gdt_limit) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Load the control registers.  CR0 must be last so long mode is properly
        // enabled (the architecture sets LME prior to initial entry), and the CR0
        // load must be followed by a far jump to complete long mode
        // configuration.

        // mov eax, [initialCr4]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr4) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr4, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xE0]);

        // mov eax, [transitionCr3]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, transition_cr3) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // mov eax, [initialCr0]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0x05]);
        relative_offset = 0xFFFFF000 + offset_of!(TdxTrampolineContext, cr0) as u32;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr0, eax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xC0]);

        // jmp far L2
        //
        // EA ptr16:32 — a 4-byte offset (the flat address of the next
        // instruction, current position + 6 operand bytes) followed by the
        // static GDT code selector 0x0008; this reloads CS for long mode.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0xEA]);
        relative_offset = 0xFFFFF000 + byte_offset as u32 + 6;
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x08, 0x00]);

        // L2:

        // Load the 64-bit CR3 now that long mode is active.
        // From here on operands are RIP-relative (rel32 from the end of the
        // instruction, i.e. byte_offset + 4), so no 0xFFFFF000 bias is needed.

        // mov rax, [initialCr3]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, cr3) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov cr3, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x22, 0xD8]);

        // Load descriptor tables and selectors, except CS which will be loaded in
        // the final jump.  If no GDT is specified, then skip loading all
        // selectors.

        // mov ax, [initialGdtrLimit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz L4
        //
        // Reserve a rel8 byte; patched at "L4:" below to skip all selector
        // loads when no GDT was imported.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l4_offset = byte_offset as u32;

        // lgdt [initialGdtr]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x15]);
        relative_offset = (offset_of!(TdxTrampolineContext, gdtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // @@:
        //
        // NOTE(review): this patches reset_page[l0_offset - 1], the final
        // byte of the serialized trampoline context (no rel8 byte was
        // reserved at L0). In long mode the affected descriptor byte would be
        // a base byte that the CPU ignores for code segments, but this looks
        // like a mislabeled patch target — verify against the intended
        // control flow.
        reset_page[l0_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l0_offset)) as u8;

        // mov ax, [initialIdtrLimit]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        //
        // Skip the lidt when no IDT was imported; rel8 patched just below.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // lidt [initialIdtr]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x01, 0x1D]);
        relative_offset = (offset_of!(TdxTrampolineContext, idtr_limit) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // Load the shared data selector into all data segment registers.

        // mov ax, [dataSelector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, data_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov ss, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD0]);

        // mov ds, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xD8]);

        // mov es, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xC0]);

        // mov fs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE0]);

        // mov gs, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8E, 0xE8]);

        // mov ax, [taskSelector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, task_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        //
        // Only load TR when a task selector was imported.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // ltr ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x00, 0xD8]);

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // L4:
        reset_page[(l4_offset as usize).wrapping_sub(1)] =
            (byte_offset.wrapping_sub(l4_offset as usize)) as u8;

        // Execute TDG.MEM.PAGE.ACCEPT to accept the low 1 MB of the address
        // space.  This is only required if the start context is in VTL 0, and
        // only on the BSP.
        if self.accept_lower_1mb {
            // Skip the accept loop on APs (esi nonzero).
            // test esi, esi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

            // jnz L3
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x75]);
            byte_offset += 1;
            let l3_offset = byte_offset;

            // L2:
            // ecx:edx form the GPA argument (page 0 upwards); edi holds the
            // 1 MB end bound.
            // xor ecx, ecx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xC9]);

            // xor edx, edx
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x33, 0xD2]);

            // mov edi, 0100000h
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xBF, 0x00, 0x00, 0x10, 0x00],
            );

            // L1:
            let jump_offset = byte_offset;

            // mov eax, 06h
            //
            // 6 is the TDCALL leaf number for TDG.MEM.PAGE.ACCEPT.
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0xB8, 0x06, 0x00, 0x00, 0x00],
            );

            // tdcall
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x0F, 0x01, 0xCC]);

            // test rax, rax
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x85, 0xC0]);

            // jne BreakPoint
            //
            // On accept failure, jump into the INT 3 fill just below the
            // reset vector (offset 0xFEF).
            // NOTE(review): relative_offset is a usize here, so as_bytes()
            // is 8 bytes on 64-bit hosts; the 4 extra (zero) bytes written
            // past the rel32 slot are immediately overwritten by the next
            // instruction. Also assumes byte_offset <= 0xFEF.
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x0F, 0x85]);
            byte_offset += 4;
            let relative_offset = 0xFEF - byte_offset;
            copy_instr(
                &mut reset_page,
                byte_offset.wrapping_sub(4),
                relative_offset.as_bytes(),
            );

            // Advance to the next 4K page.
            // add ecx, 01000h
            byte_offset = copy_instr(
                &mut reset_page,
                byte_offset,
                &[0x81, 0xC1, 0x00, 0x10, 0x00, 0x00],
            );

            // cmp ecx, edi
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x3B, 0xCF]);

            // jb L1
            byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x72]);
            byte_offset += 1;
            reset_page[byte_offset.wrapping_sub(1)] = (jump_offset.wrapping_sub(byte_offset)) as u8;

            // L3:
            reset_page[l3_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l3_offset)) as u8;
        }

        // Load entry register state and transfer to the image.

        // On APs, consume the mailbox: clear the command and redirect the
        // entry RIP to the wakeup vector. BSP (esi == 0) skips to L7.
        // test esi, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x85, 0xF6]);

        // jz L7
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let l7_offset = byte_offset;

        // xor rax, rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x31, 0xC0]);

        // mov [mailbox_command], ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_command) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rax, [mailbox_wakeup_vector]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8b, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, mailbox_wakeup_vector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov [initialRip], rax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x89, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // L7:
        reset_page[l7_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(l7_offset)) as u8;

        // mov rsp, [initialRsp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x25]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov rbp, [initialRbp]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x2D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rbp) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Hand the APIC id (esi) to the image in ecx before esi is clobbered.
        // mov ecx, esi
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x8B, 0xCE]);

        // mov rsi, [initialRsi]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0x8B, 0x35]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, rsi) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r8, [initialR8]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x05]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r8) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r9, [initialR9]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x0D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r9) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r10, [initialR10]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x15]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r10) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // mov r11, [initialR11]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x4C, 0x8B, 0x1D]);
        relative_offset =
            (offset_of!(TdxTrampolineContext, r11) as u32).wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // If a CS selector was imported, transfer with a far jump so CS is
        // reloaded; otherwise use a near indirect jump.

        // mov ax, [initialCs]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x8B, 0x05]);
        relative_offset = (offset_of!(TdxTrampolineContext, code_selector) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // test ax, ax
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x66, 0x85, 0xC0]);

        // jz @f
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x74]);
        byte_offset += 1;
        let jump_offset = byte_offset;

        // jmp far [initialRip]
        //
        // NOTE(review): 48 FF /5 is a far indirect jump through an m16:64
        // pointer; this reads initial_rip and presumably the selector stored
        // adjacent to it in the context — confirm the struct layout places
        // the code selector immediately after initial_rip.
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x2D]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        byte_offset = copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // @@:
        reset_page[jump_offset.wrapping_sub(1)] = (byte_offset.wrapping_sub(jump_offset)) as u8;

        // jmp [initialRip]
        byte_offset = copy_instr(&mut reset_page, byte_offset, &[0x48, 0xFF, 0x25]);
        relative_offset = (offset_of!(TdxTrampolineContext, initial_rip) as u32)
            .wrapping_sub((byte_offset + 4) as u32);
        copy_instr(&mut reset_page, byte_offset, relative_offset.as_bytes());

        // Add this data to the architectural reset page.
        //
        // page_base is a page number: 0xFFFFF * 4K == 0xFFFFF000, the top
        // page of the 32-bit address space containing the reset vector.
        state.push(VpContextState::Page(VpContextPageState {
            page_base: 0xFFFFF,
            page_count: 1,
            acceptance: loader::importer::BootPageAcceptance::Exclusive,
            data: reset_page,
        }));
    }
}