1use memory_range::MemoryRange;
7use thiserror::Error;
8
/// Page size used to validate that the RAM size is page-aligned (4KiB).
const PAGE_SIZE: u64 = 4096;
/// The 4GiB boundary, used to partition RAM accounting into below/above 4GiB.
const FOUR_GB: u64 = 0x1_0000_0000;
11
/// A memory range paired with the virtual NUMA node it is assigned to.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "mesh", derive(mesh_protobuf::Protobuf))]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect))]
pub struct MemoryRangeWithNode {
    /// The memory range.
    pub range: MemoryRange,
    /// The virtual NUMA node the range belongs to.
    pub vnode: u32,
}
22
/// The memory layout for a guest: RAM ranges (with NUMA node assignments),
/// MMIO gaps, and an optional range reserved for VTL2.
///
/// Construct via `MemoryLayout::new` or `MemoryLayout::new_from_ranges`,
/// which validate that the ranges are sorted, non-empty, and non-overlapping.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect))]
pub struct MemoryLayout {
    // RAM ranges, sorted ascending and non-overlapping.
    #[cfg_attr(feature = "inspect", inspect(with = "inspect_ranges_with_metadata"))]
    ram: Vec<MemoryRangeWithNode>,
    // MMIO gap ranges, sorted ascending and non-overlapping.
    #[cfg_attr(feature = "inspect", inspect(with = "inspect_ranges"))]
    mmio: Vec<MemoryRange>,
    // Optional range reserved for VTL2; starts at or after the end of RAM.
    vtl2_range: Option<MemoryRange>,
}
35
/// Renders a slice of plain MMIO ranges for inspection, keyed by the range's
/// string form, with each entry reporting its length in hex.
#[cfg(feature = "inspect")]
fn inspect_ranges(ranges: &[MemoryRange]) -> impl '_ + inspect::Inspect {
    let entries = ranges.iter().map(|r| {
        let key = r.to_string();
        let value = inspect::adhoc(|req| {
            req.respond().hex("length", r.len());
        });
        (key, value)
    });
    inspect::iter_by_key(entries)
}
47
/// Renders RAM ranges (with NUMA metadata) for inspection, keyed by the
/// range's string form, reporting length and vnode in hex.
#[cfg(feature = "inspect")]
fn inspect_ranges_with_metadata(ranges: &[MemoryRangeWithNode]) -> impl '_ + inspect::Inspect {
    let entries = ranges.iter().map(|entry| {
        let key = entry.range.to_string();
        let value = inspect::adhoc(|req| {
            req.respond()
                .hex("length", entry.range.len())
                .hex("vnode", entry.vnode);
        });
        (key, value)
    });
    inspect::iter_by_key(entries)
}
61
/// Errors returned when constructing a memory layout.
#[derive(Debug, Error)]
pub enum Error {
    /// RAM size was zero or not a multiple of the page size.
    #[error("invalid memory size")]
    BadSize,
    /// The MMIO gap configuration was invalid.
    // NOTE(review): this variant is never constructed in the visible code —
    // confirm whether it is used elsewhere before removing.
    #[error("invalid MMIO gap configuration")]
    BadMmioGaps,
    /// Ranges were empty, unsorted, or overlapping.
    #[error("invalid memory or MMIO ranges")]
    BadMemoryRanges,
    /// The VTL2 range starts below the end of RAM.
    #[error("vtl2 range is below end of ram")]
    Vtl2RangeBeforeEndOfRam,
}
78
79fn validate_ranges(ranges: &[MemoryRange]) -> Result<(), Error> {
80 validate_ranges_core(ranges, |x| x)
81}
82
83fn validate_ranges_with_metadata(ranges: &[MemoryRangeWithNode]) -> Result<(), Error> {
84 validate_ranges_core(ranges, |x| &x.range)
85}
86
87fn validate_ranges_core<T>(ranges: &[T], getter: impl Fn(&T) -> &MemoryRange) -> Result<(), Error> {
90 if ranges.iter().any(|x| getter(x).is_empty())
91 || !ranges.iter().zip(ranges.iter().skip(1)).all(|(x, y)| {
92 let x = getter(x);
93 let y = getter(y);
94 x <= y && !x.overlaps(y)
95 })
96 {
97 return Err(Error::BadMemoryRanges);
98 }
99
100 Ok(())
101}
102
/// The classification returned by `MemoryLayout::probe_address`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AddressType {
    /// The address falls within a RAM range.
    Ram,
    /// The address falls within an MMIO gap.
    Mmio,
}
111
112impl MemoryLayout {
113 pub fn new(
124 ram_size: u64,
125 gaps: &[MemoryRange],
126 vtl2_range: Option<MemoryRange>,
127 ) -> Result<Self, Error> {
128 if ram_size == 0 || ram_size & (PAGE_SIZE - 1) != 0 {
129 return Err(Error::BadSize);
130 }
131
132 validate_ranges(gaps)?;
133 let mut ram = Vec::new();
134 let mut remaining = ram_size;
135 let mut remaining_gaps = gaps.iter().cloned();
136 let mut last_end = 0;
137
138 while remaining > 0 {
139 let (this, next_end) = if let Some(gap) = remaining_gaps.next() {
140 (remaining.min(gap.start() - last_end), gap.end())
141 } else {
142 (remaining, 0)
143 };
144
145 ram.push(MemoryRangeWithNode {
146 range: MemoryRange::new(last_end..last_end + this),
147 vnode: 0,
148 });
149 remaining -= this;
150 last_end = next_end;
151 }
152
153 Self::build(ram, gaps.to_vec(), vtl2_range)
154 }
155
156 pub fn new_from_ranges(
162 memory: &[MemoryRangeWithNode],
163 gaps: &[MemoryRange],
164 ) -> Result<Self, Error> {
165 validate_ranges_with_metadata(memory)?;
166 validate_ranges(gaps)?;
167 Self::build(memory.to_vec(), gaps.to_vec(), None)
168 }
169
170 fn build(
174 ram: Vec<MemoryRangeWithNode>,
175 mmio: Vec<MemoryRange>,
176 vtl2_range: Option<MemoryRange>,
177 ) -> Result<Self, Error> {
178 let mut all_ranges = ram
179 .iter()
180 .map(|x| &x.range)
181 .chain(&mmio)
182 .chain(&vtl2_range)
183 .copied()
184 .collect::<Vec<_>>();
185
186 all_ranges.sort();
187 validate_ranges(&all_ranges)?;
188
189 if all_ranges
190 .iter()
191 .zip(all_ranges.iter().skip(1))
192 .any(|(x, y)| x.overlaps(y))
193 {
194 return Err(Error::BadMemoryRanges);
195 }
196
197 let last_ram_entry = ram.last().ok_or(Error::BadMemoryRanges)?;
198 let end_of_ram = last_ram_entry.range.end();
199
200 if let Some(range) = vtl2_range {
201 if range.start() < end_of_ram {
202 return Err(Error::Vtl2RangeBeforeEndOfRam);
203 }
204 }
205
206 Ok(Self {
207 ram,
208 mmio,
209 vtl2_range,
210 })
211 }
212
213 pub fn mmio(&self) -> &[MemoryRange] {
215 &self.mmio
216 }
217
218 pub fn ram(&self) -> &[MemoryRangeWithNode] {
220 &self.ram
221 }
222
223 pub fn vtl2_range(&self) -> Option<MemoryRange> {
227 self.vtl2_range
228 }
229
230 pub fn ram_size(&self) -> u64 {
232 self.ram.iter().map(|r| r.range.len()).sum()
233 }
234
235 pub fn end_of_ram(&self) -> u64 {
237 self.ram.last().expect("mmio set").range.end()
239 }
240
241 pub fn ram_below_4gb(&self) -> u64 {
243 self.ram
244 .iter()
245 .filter(|r| r.range.end() < FOUR_GB)
246 .map(|r| r.range.len())
247 .sum()
248 }
249
250 pub fn ram_above_4gb(&self) -> u64 {
252 self.ram
253 .iter()
254 .filter(|r| r.range.end() >= FOUR_GB)
255 .map(|r| r.range.len())
256 .sum()
257 }
258
259 pub fn ram_above_high_mmio(&self) -> Option<u64> {
263 if self.mmio.len() != 2 {
264 return None;
265 }
266
267 Some(
268 self.ram
269 .iter()
270 .filter(|r| r.range.start() >= self.mmio[1].end())
271 .map(|r| r.range.len())
272 .sum(),
273 )
274 }
275
276 pub fn max_ram_below_4gb(&self) -> Option<u64> {
280 Some(
281 self.ram
282 .iter()
283 .rev()
284 .find(|r| r.range.end() < FOUR_GB)?
285 .range
286 .end(),
287 )
288 }
289
290 pub fn end_of_ram_or_mmio(&self) -> u64 {
292 std::cmp::max(self.mmio.last().expect("mmio set").end(), self.end_of_ram())
293 }
294
295 pub fn probe_address(&self, address: u64) -> Option<AddressType> {
301 let ranges = self
302 .ram
303 .iter()
304 .map(|r| (&r.range, AddressType::Ram))
305 .chain(self.mmio.iter().map(|r| (r, AddressType::Mmio)));
306
307 for (range, address_type) in ranges {
308 if range.contains_addr(address) {
309 return Some(address_type);
310 }
311 }
312
313 None
314 }
315}
316
#[cfg(test)]
mod tests {
    use super::*;

    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;
    const TB: u64 = 1024 * GB;

    // Verifies that `new` carves RAM around the MMIO gaps exactly as an
    // explicit `new_from_ranges` with the equivalent ranges would produce.
    #[test]
    fn layout() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        let ram = &[
            MemoryRangeWithNode {
                range: MemoryRange::new(0..GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(2 * GB..3 * GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(4 * GB..TB + 2 * GB),
                vnode: 0,
            },
        ];

        // 1TB of RAM with two 1GB gaps: RAM fills 0..1GB, 2..3GB, then the
        // remainder after the second gap.
        let layout = MemoryLayout::new(TB, mmio, None).unwrap();
        assert_eq!(
            layout.ram(),
            &[
                MemoryRangeWithNode {
                    range: MemoryRange::new(0..GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(2 * GB..3 * GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(4 * GB..TB + 2 * GB),
                    vnode: 0
                },
            ]
        );
        assert_eq!(layout.mmio(), mmio);
        assert_eq!(layout.ram_size(), TB);
        assert_eq!(layout.end_of_ram(), TB + 2 * GB);

        // The explicit-ranges constructor should yield the same layout.
        let layout = MemoryLayout::new_from_ranges(ram, mmio).unwrap();
        assert_eq!(
            layout.ram(),
            &[
                MemoryRangeWithNode {
                    range: MemoryRange::new(0..GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(2 * GB..3 * GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(4 * GB..TB + 2 * GB),
                    vnode: 0
                },
            ]
        );
        assert_eq!(layout.mmio(), mmio);
        assert_eq!(layout.ram_size(), TB);
        assert_eq!(layout.end_of_ram(), TB + 2 * GB);
    }

    // Verifies that invalid configurations are rejected: unaligned RAM size,
    // unsorted gaps, empty RAM, and RAM overlapping an MMIO gap.
    #[test]
    fn bad_layout() {
        // RAM size not page-aligned.
        MemoryLayout::new(TB + 1, &[], None).unwrap_err();
        // Gaps out of order.
        let mmio = &[
            MemoryRange::new(3 * GB..4 * GB),
            MemoryRange::new(GB..2 * GB),
        ];
        MemoryLayout::new(TB, mmio, None).unwrap_err();

        // No RAM ranges at all.
        MemoryLayout::new_from_ranges(&[], mmio).unwrap_err();

        // Unsorted gaps with explicit RAM.
        let ram = &[MemoryRangeWithNode {
            range: MemoryRange::new(0..GB),
            vnode: 0,
        }];
        MemoryLayout::new_from_ranges(ram, mmio).unwrap_err();

        // RAM overlapping the first MMIO gap by 1MB.
        let ram = &[MemoryRangeWithNode {
            range: MemoryRange::new(0..GB + MB),
            vnode: 0,
        }];
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        MemoryLayout::new_from_ranges(ram, mmio).unwrap_err();
    }

    // Verifies address classification: RAM addresses, MMIO addresses, and
    // addresses outside both return Ram, Mmio, and None respectively.
    #[test]
    fn probe_address() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        let ram = &[
            MemoryRangeWithNode {
                range: MemoryRange::new(0..GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(2 * GB..3 * GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(4 * GB..TB + 2 * GB),
                vnode: 0,
            },
        ];

        let layout = MemoryLayout::new_from_ranges(ram, mmio).unwrap();

        assert_eq!(layout.probe_address(0), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(256), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(2 * GB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(4 * GB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(TB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(TB + 1), Some(AddressType::Ram));

        assert_eq!(layout.probe_address(GB), Some(AddressType::Mmio));
        assert_eq!(layout.probe_address(GB + 123), Some(AddressType::Mmio));
        assert_eq!(layout.probe_address(3 * GB), Some(AddressType::Mmio));

        // Past the end of RAM and outside any gap.
        assert_eq!(layout.probe_address(TB + 2 * GB), None);
        assert_eq!(layout.probe_address(TB + 3 * GB), None);
        assert_eq!(layout.probe_address(4 * TB), None);
    }
}