1use crate::Cpu;
7use crate::opcodes::Aarch64DecodeGroup;
8use crate::opcodes::Aarch64DecodeLoadStoreGroup;
9use crate::opcodes::LoadRegisterLiteral;
10use crate::opcodes::LoadStoreAtomic;
11use crate::opcodes::LoadStoreRegister;
12use crate::opcodes::LoadStoreRegisterPair;
13use crate::opcodes::decode_group;
14use aarch64defs::EsrEl2;
15use inspect::Inspect;
16use thiserror::Error;
17
/// Errors raised while decoding or emulating an intercepted instruction.
///
/// `E` is the backend [`Cpu`]'s memory-access error type.
#[derive(Debug, Error)]
pub enum Error<E> {
    /// The opcode did not decode to any known instruction.
    #[error("unknown instruction: {0:#x?}")]
    UnsupportedInstruction(u32),
    /// The opcode decoded to an instruction group this emulator does not handle.
    #[error("unsupported instruction group: {0:?} {1:#x?}")]
    UnsupportedInstructionGroup(Aarch64DecodeGroup, u32),
    /// The opcode decoded to a load/store subgroup that is not emulated.
    #[error("unsupported load/store instruction: {0:?} {1:#x?}")]
    UnsupportedLoadStoreInstruction(Aarch64DecodeLoadStoreGroup, u32),
    /// The intercepted instruction is not 4 bytes long (i.e. not A64).
    #[error("unsupported instruction set (thumb)")]
    UnsupportedInstructionSet,
    /// The backend failed a guest memory access at the given address.
    MemoryAccess(u64, OperationKind, #[source] E),
}
31
/// State captured by the hypervisor at the time of a memory intercept, used to
/// drive instruction emulation.
#[derive(Debug, Default, Inspect)]
pub struct InterceptState {
    /// Raw bytes of the intercepted instruction, when captured.
    pub instruction_bytes: [u8; 4],
    /// Number of valid bytes in `instruction_bytes`; 0 means the instruction
    /// must be fetched from guest memory instead.
    pub instruction_byte_count: u8,
    /// Guest physical address of the faulting access, when known.
    pub gpa: Option<u64>,
    /// Exception syndrome (ESR_EL2) describing the intercepted access.
    #[inspect(hex, with = "|&x| u64::from(x)")]
    pub syndrome: EsrEl2,
    /// Whether an interruption was pending at intercept time.
    /// NOTE(review): not consulted by the emulation paths visible in this file.
    pub interruption_pending: bool,
}
41
/// Internal result payload used by the emulation paths; wraps the public
/// [`Error`] boxed to keep the `Err` variant small.
enum InternalError<E> {
    /// A fatal emulation error to surface to the caller.
    Error(Box<Error<E>>),
}
46
47impl<E> From<Error<E>> for InternalError<E> {
48 fn from(err: Error<E>) -> Self {
49 InternalError::Error(Box::new(err))
50 }
51}
52
53impl<E> From<Box<Error<E>>> for InternalError<E> {
54 fn from(err: Box<Error<E>>) -> Self {
55 InternalError::Error(err)
56 }
57}
58
/// Thin wrapper around a [`Cpu`] backend that converts backend memory-access
/// failures into [`Error::MemoryAccess`].
#[derive(Debug)]
pub(crate) struct EmulatorOperations<T: Cpu> {
    /// The underlying CPU backend performing the actual accesses.
    pub cpu: T,
}
63
64impl<T: Cpu> EmulatorOperations<T> {
65 pub async fn read_instruction(
67 &mut self,
68 gva: u64,
69 data: &mut [u8],
70 ) -> Result<(), Box<Error<T::Error>>> {
71 self.cpu
72 .read_instruction(gva, data)
73 .await
74 .map_err(|err| Error::MemoryAccess(gva, OperationKind::Read, err))?;
75 Ok(())
76 }
77
78 pub async fn read_memory(
80 &mut self,
81 gva: u64,
82 data: &mut [u8],
83 ) -> Result<(), Box<Error<T::Error>>> {
84 self.cpu
85 .read_memory(gva, data)
86 .await
87 .map_err(|err| Error::MemoryAccess(gva, OperationKind::Read, err))?;
88 Ok(())
89 }
90
91 pub async fn read_physical_memory(
93 &mut self,
94 gpa: u64,
95 data: &mut [u8],
96 ) -> Result<(), Box<Error<T::Error>>> {
97 self.cpu
98 .read_physical_memory(gpa, data)
99 .await
100 .map_err(|err| Error::MemoryAccess(gpa, OperationKind::Read, err))?;
101 Ok(())
102 }
103
104 pub async fn write_memory(
106 &mut self,
107 gva: u64,
108 data: &[u8],
109 ) -> Result<(), Box<Error<T::Error>>> {
110 self.cpu
111 .write_memory(gva, data)
112 .await
113 .map_err(|err| Error::MemoryAccess(gva, OperationKind::Write, err))?;
114 Ok(())
115 }
116
117 pub async fn write_physical_memory(
119 &mut self,
120 gpa: u64,
121 data: &[u8],
122 ) -> Result<(), Box<Error<T::Error>>> {
123 self.cpu
124 .write_physical_memory(gpa, data)
125 .await
126 .map_err(|err| Error::MemoryAccess(gpa, OperationKind::Write, err))?;
127 Ok(())
128 }
129
130 pub async fn compare_and_write_memory(
132 &mut self,
133 gva: u64,
134 current: &[u8],
135 new: &[u8],
136 ) -> Result<bool, Box<Error<T::Error>>> {
137 let mut success = false;
138 self.cpu
139 .compare_and_write_memory(gva, current, new, &mut success)
140 .await
141 .map_err(|err| Error::MemoryAccess(gva, OperationKind::Write, err))?;
142 Ok(success)
143 }
144}
145
/// Emulates a single intercepted AArch64 load/store instruction against a
/// [`Cpu`] backend.
#[derive(Debug)]
pub struct Emulator<'a, T: Cpu> {
    /// Wrapped CPU operations used for guest memory and register access.
    inner: EmulatorOperations<T>,
    /// Intercept details captured by the hypervisor for this emulation.
    intercept_state: &'a InterceptState,
}
152
/// The kind of memory operation being performed, reported in
/// [`Error::MemoryAccess`].
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum OperationKind {
    Read,
    Write,
    /// NOTE(review): not constructed in this file; presumably used when an
    /// access fails while resolving an effective address — confirm at callers.
    AddressComputation,
}
159
impl<'a, T: Cpu> Emulator<'a, T> {
    /// Creates a new emulator over `cpu` for the access described by
    /// `intercept_state`.
    pub fn new(cpu: T, intercept_state: &'a InterceptState) -> Self {
        Emulator {
            inner: EmulatorOperations { cpu },
            intercept_state,
        }
    }

    /// Advances the guest program counter by `count` bytes, wrapping on
    /// overflow.
    fn advance_pc(&mut self, count: u64) {
        let new_pc = self.inner.cpu.pc().wrapping_add(count);
        self.inner.cpu.update_pc(new_pc);
    }

    /// Fast path: emulates the access using only the ESR_EL2 syndrome, with no
    /// instruction fetch or decode.
    ///
    /// Returns `Ok(true)` when the access was fully handled here, `Ok(false)`
    /// when the caller must fall back to fetching and decoding the actual
    /// instruction (no faulting GPA, not a data abort, or the instruction
    /// syndrome is not valid).
    async fn decode_with_syndrome(&mut self) -> Result<bool, InternalError<T::Error>> {
        // Without the faulting guest physical address there is nothing to
        // access via this path.
        let Some(gpa) = self.intercept_state.gpa else {
            return Ok(false);
        };
        let syndrome = self.intercept_state.syndrome;
        // Only data aborts (from the current or a lower EL) carry load/store
        // details in their ISS encoding.
        if !matches!(
            aarch64defs::ExceptionClass(syndrome.ec()),
            aarch64defs::ExceptionClass::DATA_ABORT | aarch64defs::ExceptionClass::DATA_ABORT_LOWER
        ) {
            return Ok(false);
        }
        let iss = aarch64defs::IssDataAbort::from(syndrome.iss());
        // ISV clear means the size/register syndrome fields below are invalid.
        if !iss.isv() {
            return Ok(false);
        }
        // SAS encodes the access size as a power of two: 1, 2, 4, or 8 bytes.
        let len = 1 << iss.sas();
        let sign_extend = iss.sse();

        // SRT is the transfer register number; 31 denotes XZR/WZR (zero on
        // read, discard on write), as handled explicitly below.
        let reg_index = iss.srt();
        if iss.wnr() {
            // Write (WnR set): source the value from the register — XZR reads
            // as zero — and store its low `len` bytes.
            let data = match reg_index {
                0..=30 => self.inner.cpu.x(reg_index),
                31 => 0_u64,
                _ => unreachable!(),
            }
            .to_ne_bytes();
            self.inner.write_physical_memory(gpa, &data[..len]).await?;
        } else if reg_index != 31 {
            // Read: loads targeting register 31 are skipped since the result
            // would be discarded (XZR).
            let mut data = [0; 8];
            self.inner
                .read_physical_memory(gpa, &mut data[..len])
                .await?;
            let mut data = u64::from_ne_bytes(data);
            if sign_extend {
                // Sign-extend from the loaded width using an arithmetic
                // shift-left/shift-right pair.
                let shift = 64 - len * 8;
                data = ((data as i64) << shift >> shift) as u64;
                // SF clear means a 32-bit destination register: keep only the
                // low word.
                if !iss.sf() {
                    data &= 0xffffffff;
                }
            }
            self.inner.cpu.update_x(reg_index, data);
        }
        // IL distinguishes a 32-bit trapped instruction from a 16-bit one.
        self.advance_pc(if syndrome.il() { 4 } else { 2 });
        Ok(true)
    }

    /// Emulates the intercepted instruction: first via the syndrome fast path,
    /// then by fetching and decoding the actual opcode.
    pub async fn run(&mut self) -> Result<(), Box<Error<T::Error>>> {
        match self.decode_with_syndrome().await {
            Ok(false) => (),
            Ok(true) => return Ok(()),
            Err(InternalError::Error(err)) => {
                // A syndrome-path failure is not fatal: log it and fall
                // through to the full decode below.
                tracing::error!(%err, "Error decoding access via syndrome");
            }
        };

        // Prefer the instruction bytes captured at intercept time; otherwise
        // fetch them from the guest at the current PC.
        let instruction = if self.intercept_state.instruction_byte_count > 0 {
            // A64 instructions are always 4 bytes; any other captured length
            // implies a different (unsupported) instruction set.
            if self.intercept_state.instruction_byte_count != 4 {
                return Err(Box::new(Error::UnsupportedInstructionSet));
            }
            u32::from_ne_bytes(self.intercept_state.instruction_bytes)
        } else {
            let mut bytes = [0_u8; 4];
            let pc = self.inner.cpu.pc();
            self.inner.read_instruction(pc, &mut bytes[..]).await?;
            u32::from_ne_bytes(bytes)
        };
        let instruction_type = decode_group(instruction)?;
        match self.emulate(instruction, instruction_type).await {
            Ok(()) => {
                // The emulated instruction completed; step past it.
                self.advance_pc(4);
                Ok(())
            }
            Err(InternalError::Error(err)) => Err(err),
        }
    }

    /// Dispatches the decoded instruction to the matching load/store emulator.
    ///
    /// Only load/store groups are supported; any other group (or an
    /// unhandled load/store subgroup) is reported as an error.
    async fn emulate(
        &mut self,
        opcode: u32,
        instruction_type: Aarch64DecodeGroup,
    ) -> Result<(), InternalError<T::Error>> {
        let result = match instruction_type {
            // Single-register loads/stores in all addressing forms.
            Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::UnscaledImmediate)
            | Aarch64DecodeGroup::LoadStore(
                Aarch64DecodeLoadStoreGroup::RegisterUnscaledImmediate,
            )
            | Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterUnprivileged)
            | Aarch64DecodeGroup::LoadStore(
                Aarch64DecodeLoadStoreGroup::RegisterImmediatePostIndex,
            )
            | Aarch64DecodeGroup::LoadStore(
                Aarch64DecodeLoadStoreGroup::RegisterImmediatePreIndex,
            )
            | Aarch64DecodeGroup::LoadStore(
                Aarch64DecodeLoadStoreGroup::RegisterUnsignedImmediate,
            )
            | Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterOffset) => {
                LoadStoreRegister(opcode).emulate(&mut self.inner).await
            }
            // PC-relative literal loads.
            Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterLiteral) => {
                LoadRegisterLiteral(opcode).emulate(&mut self.inner).await
            }
            // Register-pair loads/stores.
            Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::NoAllocatePair)
            | Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterPairPostIndex)
            | Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterPairOffset)
            | Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::RegisterPairPreIndex) => {
                LoadStoreRegisterPair(opcode).emulate(&mut self.inner).await
            }
            // Atomic memory instructions.
            Aarch64DecodeGroup::LoadStore(Aarch64DecodeLoadStoreGroup::Atomic) => {
                LoadStoreAtomic(opcode).emulate(&mut self.inner).await
            }
            Aarch64DecodeGroup::LoadStore(typ) => {
                return Err(InternalError::Error(Box::new(
                    Error::UnsupportedLoadStoreInstruction(typ, opcode),
                )));
            }
            group => {
                return Err(InternalError::Error(Box::new(
                    Error::UnsupportedInstructionGroup(group, opcode),
                )));
            }
        };
        result.map_err(InternalError::Error)
    }
}
317}