use guestmem::ranges::PagedRange;
use thiserror::Error;
use zerocopy::FromBytes;
use zerocopy::Immutable;
use zerocopy::IntoBytes;
use zerocopy::KnownLayout;

const PAGE_SIZE: usize = 4096;

/// Header for one packed range: the 64-bit word stores the byte length in
/// its low 32 bits and the byte offset into the first page in its high 32
/// bits, as decoded by `parse`.
#[repr(C)]
#[derive(Copy, Clone, Debug, IntoBytes, Immutable, KnownLayout, FromBytes)]
pub struct GpaRange {
    pub len: u32,
    pub offset: u32,
}

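/// Checks that `buf` begins with `count` well-formed packed ranges and
/// returns the number of `u64` words those ranges occupy.
///
/// A minimal sketch of the expected encoding, with illustrative values (one
/// header word, then one GPN per page touched):
///
/// ```ignore
/// // An 8-byte range starting at byte offset 16 within page number 0x42.
/// let hdr = GpaRange { len: 8, offset: 16 };
/// let buf: Vec<u64> = vec![zerocopy::transmute!(hdr), 0x42];
/// assert_eq!(validate_gpa_ranges(1, &buf).unwrap(), 2);
/// ```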
pub fn validate_gpa_ranges(count: usize, buf: &[u64]) -> Result<usize, Error> {
    let mut rem: &[u64] = buf;
    for _ in 0..count {
        let (_, rest) = parse(rem)?;
        rem = rest;
    }
    Ok(buf.len() - rem.len())
}

#[derive(Debug, Default, Clone)]
pub struct MultiPagedRangeBuf {
    buf: Box<[u64]>,
    /// The number of `u64` words in `buf` holding validated range data.
    valid: usize,
    /// The number of ranges encoded in the valid portion of `buf`.
    count: usize,
}

impl MultiPagedRangeBuf {
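    /// Validates that `buf` holds `count` packed ranges, truncates it to the
    /// validated prefix, and takes ownership of it.
    ///
    /// A minimal sketch with illustrative values (one header word plus one
    /// GPN):
    ///
    /// ```ignore
    /// let hdr = GpaRange { len: 4096, offset: 0 };
    /// let ranges =
    ///     MultiPagedRangeBuf::from_range_buffer(1, vec![zerocopy::transmute!(hdr), 0x99])
    ///         .unwrap();
    /// assert_eq!(ranges.range_count(), 1);
    /// ```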
    pub fn from_range_buffer(count: usize, mut buf: Vec<u64>) -> Result<Self, Error> {
        let valid = validate_gpa_ranges(count, buf.as_ref())?;
        buf.truncate(valid);
        Ok(MultiPagedRangeBuf {
            buf: buf.into_boxed_slice(),
            valid,
            count,
        })
    }

    pub fn new() -> Self {
        Self::default()
    }

    pub fn iter(&self) -> MultiPagedRangeIter<'_> {
        MultiPagedRangeIter {
            buf: self.buf.as_ref(),
            count: self.count,
        }
    }

    pub fn range_count(&self) -> usize {
        self.count
    }

    pub fn first(&self) -> Option<PagedRange<'_>> {
        self.iter().next()
    }

    /// Returns the sole contained range if this buffer holds exactly one
    /// range that starts at offset zero and covers whole pages.
    pub fn contiguous_aligned(&self) -> Option<PagedRange<'_>> {
        if self.count != 1 {
            return None;
        }
        let first = self.first()?;
        if first.offset() != 0 || first.len() % PAGE_SIZE != 0 {
            return None;
        }
        Some(first)
    }

    pub fn range_buffer(&self) -> &[u64] {
        &self.buf[..self.valid]
    }

    /// Removes all ranges, retaining the existing allocation.
    pub fn clear(&mut self) {
        self.valid = 0;
        self.count = 0;
    }

    /// Ensures there is room for `additional` more words past the valid
    /// region and returns that (zero-initialized) tail for the caller to
    /// fill.
    fn ensure_space(&mut self, additional: usize) -> &mut [u64] {
        let required = self.valid + additional;
        if required > self.buf.len() {
            self.resize_buffer(required);
        }
        &mut self.buf[self.valid..required]
    }

    #[cold]
    fn resize_buffer(&mut self, new_size: usize) {
        let mut buf: Vec<u64> = std::mem::take(&mut self.buf).into();
        buf.resize(new_size, 0);
        // Resize again out to the full allocated capacity so that
        // `into_boxed_slice` below does not reallocate to shrink the vector.
        buf.resize(buf.capacity(), 0);
        self.buf = buf.into_boxed_slice();
    }

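    /// Appends `range` as a packed header word followed by its GPNs.
    ///
    /// A minimal usage sketch with illustrative values (one 4 KiB page at
    /// GPN 0x10), assuming `PagedRange::new` takes `(offset, len, gpns)` as
    /// it does in `parse` below:
    ///
    /// ```ignore
    /// let mut buf = MultiPagedRangeBuf::new();
    /// buf.push_range(PagedRange::new(0, 4096, &[0x10]).unwrap());
    /// assert_eq!(buf.range_count(), 1);
    /// assert!(buf.contiguous_aligned().is_some());
    /// ```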
    pub fn push_range(&mut self, range: PagedRange<'_>) {
        let len = 1 + range.gpns().len();
        let buf = self.ensure_space(len);
        let hdr = GpaRange {
            len: range.len() as u32,
            offset: range.offset() as u32,
        };
        buf[0] = zerocopy::transmute!(hdr);
        buf[1..].copy_from_slice(range.gpns());
        self.count += 1;
        self.valid += len;
    }

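    /// Reserves `len` words and lets `f` write `count` packed ranges into
    /// them, validating the result before committing it.
    ///
    /// Returns `Err(e)` if `f` fails, `Ok(Err(_))` if the written words do
    /// not validate, and `Ok(Ok(()))` on success.
    ///
    /// A minimal usage sketch with illustrative values (one header word plus
    /// a single GPN):
    ///
    /// ```ignore
    /// let mut buf = MultiPagedRangeBuf::new();
    /// buf.try_extend_with(2, 1, |words| {
    ///     let hdr = GpaRange { len: 8, offset: 0 };
    ///     words[0] = zerocopy::transmute!(hdr);
    ///     words[1] = 0x20; // the single GPN
    ///     Ok::<_, std::convert::Infallible>(())
    /// })
    /// .unwrap()
    /// .unwrap();
    /// ```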
    pub fn try_extend_with<E>(
        &mut self,
        len: usize,
        count: usize,
        f: impl FnOnce(&mut [u64]) -> Result<(), E>,
    ) -> Result<Result<(), Error>, E> {
        let buf = self.ensure_space(len);
        f(buf)?;
        let valid_len = match validate_gpa_ranges(count, buf) {
            Ok(v) => v,
            Err(e) => return Ok(Err(e)),
        };
        // Only the validated prefix of the newly written words is committed.
        self.valid += valid_len;
        self.count += count;
        Ok(Ok(()))
    }
}

impl<'a> IntoIterator for &'a MultiPagedRangeBuf {
    type Item = PagedRange<'a>;
    type IntoIter = MultiPagedRangeIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a> FromIterator<PagedRange<'a>> for MultiPagedRangeBuf {
    fn from_iter<I: IntoIterator<Item = PagedRange<'a>>>(iter: I) -> MultiPagedRangeBuf {
        let mut this = MultiPagedRangeBuf::new();
        for range in iter {
            this.push_range(range);
        }
        this
    }
}

#[derive(Clone, Debug)]
pub struct MultiPagedRangeIter<'a> {
    buf: &'a [u64],
    count: usize,
}

impl<'a> Iterator for MultiPagedRangeIter<'a> {
    type Item = PagedRange<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.count == 0 {
            return None;
        }
        // The buffer was validated at construction, so the header word and
        // its trailing GPNs are guaranteed to be present and well formed.
        let hdr = GpaRange::read_from_prefix(self.buf[0].as_bytes())
            .unwrap()
            .0;
        let page_count = ((hdr.offset + hdr.len) as usize).div_ceil(PAGE_SIZE);
        let (this, rest) = self.buf.split_at(page_count + 1);
        let range = PagedRange::new(hdr.offset as usize, hdr.len as usize, &this[1..]).unwrap();
        self.count -= 1;
        self.buf = rest;
        Some(range)
    }
}

#[derive(Debug, Error)]
pub enum Error {
    #[error("empty range")]
    EmptyRange,
    #[error("empty byte count")]
    EmptyByteCount,
    #[error("range too small")]
    RangeTooSmall,
    #[error("byte offset too large")]
    OffsetTooLarge,
    #[error("integer overflow")]
    Overflow,
}

fn parse(buf: &[u64]) -> Result<(PagedRange<'_>, &[u64]), Error> {
    let (hdr, gpas) = buf.split_first().ok_or(Error::EmptyRange)?;
    // The header word packs the byte count into its low 32 bits and the
    // byte offset into its high 32 bits.
    let byte_count = *hdr as u32;
    if byte_count == 0 {
        return Err(Error::EmptyByteCount);
    }
    let byte_offset = (*hdr >> 32) as u32;
    if byte_offset > 0xfff {
        return Err(Error::OffsetTooLarge);
    }
    // Pages spanned: ceil((byte_offset + byte_count) / PAGE_SIZE), computed
    // with checked arithmetic to reject overflowing inputs.
    let pages = (byte_count
        .checked_add(4095)
        .ok_or(Error::Overflow)?
        .checked_add(byte_offset)
        .ok_or(Error::Overflow)?) as usize
        / PAGE_SIZE;
    if gpas.len() < pages {
        return Err(Error::RangeTooSmall);
    }
    let (gpas, rest) = gpas.split_at(pages);
    assert!(!gpas.is_empty());
    Ok((
        PagedRange::new(byte_offset as usize, byte_count as usize, gpas)
            .expect("already validated"),
        rest,
    ))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn large_offset() {
        // An offset of 0x1000 is one past the largest valid in-page offset
        // (0xfff), so validation must reject it.
        let hdr = GpaRange {
            len: 1,
            offset: 0x1000,
        };
        let buf = vec![
            u64::from_le_bytes(hdr.as_bytes().try_into().unwrap()),
            0xdead_beef,
        ];

        let err = MultiPagedRangeBuf::from_range_buffer(1, buf).unwrap_err();
        assert!(matches!(err, Error::OffsetTooLarge));
    }
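
    // A minimal round-trip sketch: push one page-aligned range and read it
    // back, assuming `PagedRange::new` takes `(offset, len, gpns)` as it
    // does in `parse`.
    #[test]
    fn push_and_iter_round_trip() {
        let mut buf = MultiPagedRangeBuf::new();
        buf.push_range(PagedRange::new(0, PAGE_SIZE, &[0x10]).unwrap());
        assert_eq!(buf.range_count(), 1);

        let first = buf.first().unwrap();
        assert_eq!(first.offset(), 0);
        assert_eq!(first.len(), PAGE_SIZE);
        assert!(buf.contiguous_aligned().is_some());
    }

    // A sketch of `try_extend_with`: the closure writes one packed range
    // (header word plus a single GPN) directly into the reserved words.
    #[test]
    fn try_extend_with_one_range() {
        let mut buf = MultiPagedRangeBuf::new();
        buf.try_extend_with(2, 1, |words| {
            let hdr = GpaRange { len: 8, offset: 0 };
            words[0] = zerocopy::transmute!(hdr);
            words[1] = 0x20;
            Ok::<_, std::convert::Infallible>(())
        })
        .unwrap()
        .unwrap();
        assert_eq!(buf.range_count(), 1);
        assert_eq!(buf.range_buffer().len(), 2);
    }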
}