1use super::HypercallIo;
7use crate::support::AsHandler;
8
/// Hypercall parameter IO performed directly through x64 guest registers.
pub struct X64RegisterIo<T> {
    // Underlying register-state accessor the IO operations read and write.
    inner: T,
    // True when the guest uses the 64-bit hypercall convention (single
    // 64-bit registers); false selects the 32-bit pair convention where
    // 64-bit values are split across two registers' low halves.
    is_64bit: bool,
}
14
15impl<T: X64RegisterState> X64RegisterIo<T> {
16 pub fn new(t: T, is_64bit: bool) -> Self {
21 Self { inner: t, is_64bit }
22 }
23
24 fn gp_pair(&mut self, high: X64HypercallRegister, low: X64HypercallRegister) -> u64 {
25 (self.inner.gp(high) << 32) | (self.inner.gp(low) & 0xffff_ffff)
26 }
27
28 fn mask(&self, value: u64) -> u64 {
29 value
30 & if self.is_64bit {
31 u64::MAX
32 } else {
33 u32::MAX as u64
34 }
35 }
36
37 fn set_control(&mut self, control: u64) {
38 if self.is_64bit {
39 self.inner.set_gp(X64HypercallRegister::Rcx, control);
40 } else {
41 self.inner.set_gp(X64HypercallRegister::Rdx, control >> 32);
42 self.inner
43 .set_gp(X64HypercallRegister::Rax, control & u32::MAX as u64);
44 }
45 }
46}
47
impl<T> AsHandler<T> for X64RegisterIo<T> {
    // Exposes the wrapped register state as the hypercall handler.
    fn as_handler(&mut self) -> &mut T {
        &mut self.inner
    }
}
53
impl<T> AsHandler<T> for X64RegisterIo<&mut T> {
    // Same as the owned impl, but reborrows through the stored `&mut T`.
    fn as_handler(&mut self) -> &mut T {
        &mut *self.inner
    }
}
59
60impl<T: X64RegisterState> HypercallIo for X64RegisterIo<T> {
61 fn advance_ip(&mut self) {
62 let rip = self.inner.rip().wrapping_add(3);
63 self.inner.set_rip(self.mask(rip));
64 }
65
66 fn retry(&mut self, control: u64) {
67 self.set_control(control)
69
70 }
72
73 fn control(&mut self) -> u64 {
74 if self.is_64bit {
75 self.inner.gp(X64HypercallRegister::Rcx)
76 } else {
77 self.gp_pair(X64HypercallRegister::Rdx, X64HypercallRegister::Rax)
78 }
79 }
80
81 fn vtl_input(&mut self) -> u64 {
82 let name = if self.is_64bit {
83 X64HypercallRegister::Rax
84 } else {
85 X64HypercallRegister::Rcx
86 };
87
88 let value = self.inner.gp(name);
89 self.mask(value)
90 }
91
92 fn set_result(&mut self, n: u64) {
93 if self.is_64bit {
94 self.inner.set_gp(X64HypercallRegister::Rax, n);
95 } else {
96 self.inner.set_gp(X64HypercallRegister::Rdx, n >> 32);
97 self.inner
98 .set_gp(X64HypercallRegister::Rax, n & u32::MAX as u64);
99 }
100 }
101
102 fn input_gpa(&mut self) -> u64 {
103 if self.is_64bit {
104 self.inner.gp(X64HypercallRegister::Rdx)
105 } else {
106 self.gp_pair(X64HypercallRegister::Rbx, X64HypercallRegister::Rcx)
107 }
108 }
109
110 fn output_gpa(&mut self) -> u64 {
111 if self.is_64bit {
112 self.inner.gp(X64HypercallRegister::R8)
113 } else {
114 self.gp_pair(X64HypercallRegister::Rdi, X64HypercallRegister::Rsi)
115 }
116 }
117
118 fn fast_register_pair_count(&mut self) -> usize {
119 if self.is_64bit { 7 } else { 1 }
120 }
121
122 fn extended_fast_hypercalls_ok(&mut self) -> bool {
123 self.is_64bit
124 }
125
126 fn fast_input(&mut self, buf: &mut [[u64; 2]], _output_register_pairs: usize) -> usize {
127 self.fast_regs(0, buf);
128 buf.len()
129 }
130
131 fn fast_output(&mut self, starting_pair_index: usize, buf: &[[u64; 2]]) {
132 for (i, &[low, high]) in buf.iter().enumerate() {
134 let index = i + starting_pair_index;
135 if index == 0 {
136 self.inner.set_gp(X64HypercallRegister::Rdx, low);
137 self.inner.set_gp(X64HypercallRegister::R8, high);
138 } else {
139 let x = low as u128 | ((high as u128) << 64);
140 self.inner.set_xmm(index - 1, x)
141 }
142 }
143 }
144
145 fn fast_regs(&mut self, starting_pair_index: usize, buf: &mut [[u64; 2]]) {
146 if self.is_64bit {
147 for (i, [low, high]) in buf.iter_mut().enumerate() {
148 let index = i + starting_pair_index;
149 if index == 0 {
150 *low = self.inner.gp(X64HypercallRegister::Rdx);
151 *high = self.inner.gp(X64HypercallRegister::R8);
152 } else {
153 let value = self.inner.xmm(index - 1);
154 *low = value as u64;
155 *high = (value >> 64) as u64;
156 }
157 }
158 } else if let [[low, high], ..] = buf {
159 *low = self.gp_pair(X64HypercallRegister::Rbx, X64HypercallRegister::Rcx);
160 *high = self.gp_pair(X64HypercallRegister::Rdi, X64HypercallRegister::Rsi);
161 }
162 }
163}
164
/// Access to the x64 register state needed to service hypercalls.
pub trait X64RegisterState {
    /// Returns the current instruction pointer.
    fn rip(&mut self) -> u64;

    /// Sets the instruction pointer.
    fn set_rip(&mut self, rip: u64);

    /// Returns the value of general-purpose register `n`.
    fn gp(&mut self, n: X64HypercallRegister) -> u64;

    /// Sets general-purpose register `n` to `value`.
    fn set_gp(&mut self, n: X64HypercallRegister, value: u64);

    /// Returns the value of register `xmm{n}`.
    fn xmm(&mut self, n: usize) -> u128;

    /// Sets register `xmm{n}` to `value`.
    fn set_xmm(&mut self, n: usize, value: u128);
}
185
// Blanket forwarding impl so a `&mut T` can be used wherever an owned
// register-state accessor is expected.
impl<T: X64RegisterState> X64RegisterState for &'_ mut T {
    fn rip(&mut self) -> u64 {
        (**self).rip()
    }

    fn set_rip(&mut self, rip: u64) {
        (**self).set_rip(rip)
    }

    fn gp(&mut self, n: X64HypercallRegister) -> u64 {
        (**self).gp(n)
    }

    fn set_gp(&mut self, n: X64HypercallRegister, value: u64) {
        (**self).set_gp(n, value)
    }

    fn xmm(&mut self, n: usize) -> u128 {
        (**self).xmm(n)
    }

    fn set_xmm(&mut self, n: usize, value: u128) {
        (**self).set_xmm(n, value)
    }
}
211
/// The general-purpose registers used for x64 hypercall parameter passing.
///
/// NOTE: declaration order matters — the test harness uses the default
/// discriminants (Rax = 0, Rcx = 1, ...) as bit positions in register masks.
pub enum X64HypercallRegister {
    /// rax
    Rax,
    /// rcx
    Rcx,
    /// rdx
    Rdx,
    /// rbx
    Rbx,
    /// rsi
    Rsi,
    /// rdi
    Rdi,
    /// r8
    R8,
}
229
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::TestHypercallIo;
    use crate::tests::TestRegisterState;

    // Test-harness glue: lets the shared hypercall tests drive the
    // register-based IO implementation by setting inputs and reading
    // results through the same register conventions the impl uses.
    impl<T: X64RegisterState + TestRegisterState> TestHypercallIo for X64RegisterIo<T> {
        // Reads back the hypercall result: rax in 64-bit mode, edx:eax
        // otherwise (mirrors `set_result`).
        fn get_result(&mut self) -> u64 {
            if self.is_64bit {
                self.inner.gp(X64HypercallRegister::Rax)
            } else {
                self.gp_pair(X64HypercallRegister::Rdx, X64HypercallRegister::Rax)
            }
        }

        // Delegates to the inherent helper of the same name.
        fn set_control(&mut self, control: u64) {
            X64RegisterIo::set_control(self, control);
        }

        // Inverse of `input_gpa`: rdx in 64-bit mode, ebx:ecx otherwise.
        fn set_input_gpa(&mut self, gpa: u64) {
            if self.is_64bit {
                self.inner.set_gp(X64HypercallRegister::Rdx, gpa);
            } else {
                self.inner.set_gp(X64HypercallRegister::Rbx, gpa >> 32);
                self.inner
                    .set_gp(X64HypercallRegister::Rcx, gpa & u32::MAX as u64);
            }
        }

        // Inverse of `output_gpa`: r8 in 64-bit mode, edi:esi otherwise.
        fn set_output_gpa(&mut self, gpa: u64) {
            if self.is_64bit {
                self.inner.set_gp(X64HypercallRegister::R8, gpa);
            } else {
                self.inner.set_gp(X64HypercallRegister::Rdi, gpa >> 32);
                self.inner
                    .set_gp(X64HypercallRegister::Rsi, gpa & u32::MAX as u64);
            }
        }

        // Inverse of `fast_regs`: pair 0 goes to rdx/r8 and the rest to
        // xmm registers (64-bit), or one pair split across two GP pairs
        // (32-bit).
        fn set_fast_input(&mut self, buf: &[[u64; 2]]) {
            if self.is_64bit {
                let (gp, xmm) = buf.split_at(1);
                let rdx = gp[0][0];
                let r8 = gp[0][1];
                self.inner.set_gp(X64HypercallRegister::Rdx, rdx);
                self.inner.set_gp(X64HypercallRegister::R8, r8);
                for (i, [low, high]) in xmm.iter().enumerate() {
                    let value = *low as u128 | ((*high as u128) << 64);
                    self.inner.set_xmm(i, value);
                }
            } else {
                let [low, high] = buf[0];
                self.inner.set_gp(X64HypercallRegister::Rbx, low >> 32);
                self.inner
                    .set_gp(X64HypercallRegister::Rcx, low & u32::MAX as u64);
                self.inner.set_gp(X64HypercallRegister::Rdi, high >> 32);
                self.inner
                    .set_gp(X64HypercallRegister::Rsi, high & u32::MAX as u64);
            }
        }

        // Reads back fast-hypercall output pairs, which start after the
        // input pairs (mirrors `fast_output`'s register layout).
        fn get_fast_output(&mut self, input_register_pairs: usize, buf: &mut [[u64; 2]]) {
            for (i, [low, high]) in buf.iter_mut().enumerate() {
                if i + input_register_pairs == 0 {
                    *low = self.inner.gp(X64HypercallRegister::Rdx);
                    *high = self.inner.gp(X64HypercallRegister::R8);
                } else {
                    let x = self.inner.xmm(i + input_register_pairs - 1);
                    *low = x as u64;
                    *high = (x >> 64) as u64;
                }
            }
        }

        fn get_modified_mask(&self) -> u64 {
            self.inner.get_modified_mask()
        }

        fn clear_modified_mask(&mut self) {
            self.inner.clear_modified_mask()
        }

        // Bit positions are the enum's default discriminants; these are the
        // registers written by `set_control`/`set_result` for each mode.
        fn get_io_register_mask(&self) -> u64 {
            if self.is_64bit {
                1u64 << X64HypercallRegister::Rcx as usize
                    | 1u64 << X64HypercallRegister::Rax as usize
            } else {
                1u64 << X64HypercallRegister::Rdx as usize
                    | 1u64 << X64HypercallRegister::Rax as usize
            }
        }

        fn get_name(&self) -> String {
            format!("x86_{}", if self.is_64bit { "64" } else { "32" })
        }

        fn set_vtl_input(&mut self, vtl_input: u64) {
            if self.is_64bit {
                self.inner.set_gp(X64HypercallRegister::Rax, vtl_input);
            } else {
                // NOTE(review): this stores the HIGH half of `vtl_input` into
                // rcx, but `vtl_input()` reads back only the LOW 32 bits of
                // rcx in 32-bit mode, so the value does not round-trip.
                // Looks like a copy-paste of the split-pair pattern used
                // elsewhere — confirm against the harness whether plain
                // `vtl_input` (or `vtl_input & u32::MAX as u64`) was intended.
                self.inner
                    .set_gp(X64HypercallRegister::Rcx, vtl_input >> 32);
            }
        }

        // IP advancing is exercised explicitly via `advance_ip`; nothing to
        // do automatically for the register-based implementation.
        fn auto_advance_ip(&mut self) {}
    }
}