//! Describes the memory layout of a guest: RAM ranges with their assigned
//! virtual NUMA nodes, MMIO gaps, and an optional memory range reserved for
//! VTL2.

use memory_range::MemoryRange;
use thiserror::Error;

const PAGE_SIZE: u64 = 4096;
const FOUR_GB: u64 = 0x1_0000_0000;

/// A range of memory with a virtual NUMA node assignment.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "mesh", derive(mesh_protobuf::Protobuf))]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect))]
pub struct MemoryRangeWithNode {
    /// The memory range.
    pub range: MemoryRange,
    /// The virtual NUMA node the range is assigned to.
    pub vnode: u32,
}

impl core::fmt::Display for MemoryRangeWithNode {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{}({})", self.range, self.vnode)
    }
}

/// Describes the memory layout of a guest.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect))]
pub struct MemoryLayout {
    #[cfg_attr(feature = "inspect", inspect(with = "inspect_ranges_with_metadata"))]
    ram: Vec<MemoryRangeWithNode>,
    #[cfg_attr(feature = "inspect", inspect(with = "inspect_ranges"))]
    mmio: Vec<MemoryRange>,
    /// A memory range reserved for VTL2, if any. It sits above all RAM and is
    /// not reported via [`Self::ram`] or [`Self::mmio`].
    vtl2_range: Option<MemoryRange>,
}

#[cfg(feature = "inspect")]
fn inspect_ranges(ranges: &[MemoryRange]) -> impl '_ + inspect::Inspect {
    inspect::iter_by_key(ranges.iter().map(|range| {
        (
            range.to_string(),
            inspect::adhoc(|i| {
                i.respond().hex("length", range.len());
            }),
        )
    }))
}

#[cfg(feature = "inspect")]
fn inspect_ranges_with_metadata(ranges: &[MemoryRangeWithNode]) -> impl '_ + inspect::Inspect {
    inspect::iter_by_key(ranges.iter().map(|range| {
        (
            range.range.to_string(),
            inspect::adhoc(|i| {
                i.respond()
                    .hex("length", range.range.len())
                    .hex("vnode", range.vnode);
            }),
        )
    }))
}

/// Errors returned when constructing a [`MemoryLayout`].
#[derive(Debug, Error)]
pub enum Error {
    /// The RAM size is zero or not a multiple of the page size.
    #[error("invalid memory size")]
    BadSize,
    /// The MMIO gap configuration is invalid.
    #[error("invalid MMIO gap configuration")]
    BadMmioGaps,
    /// The memory or MMIO ranges are empty, unsorted, or overlapping.
    #[error("invalid memory or MMIO ranges")]
    BadMemoryRanges,
    /// The VTL2 range starts below the end of RAM.
    #[error("vtl2 range is below end of ram")]
    Vtl2RangeBeforeEndOfRam,
}

fn validate_ranges(ranges: &[MemoryRange]) -> Result<(), Error> {
    validate_ranges_core(ranges, |x| x)
}

fn validate_ranges_with_metadata(ranges: &[MemoryRangeWithNode]) -> Result<(), Error> {
    validate_ranges_core(ranges, |x| &x.range)
}

/// Validates that the ranges are non-empty, sorted in increasing order, and
/// non-overlapping.
fn validate_ranges_core<T>(ranges: &[T], getter: impl Fn(&T) -> &MemoryRange) -> Result<(), Error> {
    if ranges.iter().any(|x| getter(x).is_empty())
        || !ranges.iter().zip(ranges.iter().skip(1)).all(|(x, y)| {
            let x = getter(x);
            let y = getter(y);
            x <= y && !x.overlaps(y)
        })
    {
        return Err(Error::BadMemoryRanges);
    }

    Ok(())
}

/// The type of address reported by [`MemoryLayout::probe_address`].
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AddressType {
    /// The address is RAM.
    Ram,
    /// The address is MMIO.
    Mmio,
}

impl MemoryLayout {
    /// Makes a new memory layout for a guest with `ram_size` bytes of memory
    /// and MMIO gaps at the locations specified by `gaps`.
    ///
    /// `ram_size` must be a nonzero multiple of the page size. `gaps` must be
    /// sorted and non-overlapping. RAM is laid out from address zero upward,
    /// skipping the gaps, and all of it is assigned to virtual NUMA node 0.
    ///
    /// `vtl2_range` optionally reserves a memory range for VTL2; it must lie
    /// above all RAM.
    pub fn new(
        ram_size: u64,
        gaps: &[MemoryRange],
        vtl2_range: Option<MemoryRange>,
    ) -> Result<Self, Error> {
        if ram_size == 0 || ram_size & (PAGE_SIZE - 1) != 0 {
            return Err(Error::BadSize);
        }

        validate_ranges(gaps)?;
        let mut ram = Vec::new();
        let mut remaining = ram_size;
        let mut remaining_gaps = gaps.iter().cloned();
        let mut last_end = 0;

        while remaining > 0 {
            // Fill RAM up to the start of the next gap (or with all remaining
            // memory if there are no more gaps), then resume after the gap.
            let (this, next_end) = if let Some(gap) = remaining_gaps.next() {
                (remaining.min(gap.start() - last_end), gap.end())
            } else {
                (remaining, 0)
            };

            ram.push(MemoryRangeWithNode {
                range: MemoryRange::new(last_end..last_end + this),
                vnode: 0,
            });
            remaining -= this;
            last_end = next_end;
        }

        Self::build(ram, gaps.to_vec(), vtl2_range)
    }

    /// Makes a new memory layout from existing RAM and MMIO ranges.
    ///
    /// Each slice must be sorted and non-overlapping, and the RAM and MMIO
    /// ranges must not overlap each other.
    pub fn new_from_ranges(
        memory: &[MemoryRangeWithNode],
        gaps: &[MemoryRange],
    ) -> Result<Self, Error> {
        validate_ranges_with_metadata(memory)?;
        validate_ranges(gaps)?;
        Self::build(memory.to_vec(), gaps.to_vec(), None)
    }

    /// Validates that RAM, MMIO, and the VTL2 range do not overlap, then
    /// constructs the final struct.
    fn build(
        ram: Vec<MemoryRangeWithNode>,
        mmio: Vec<MemoryRange>,
        vtl2_range: Option<MemoryRange>,
    ) -> Result<Self, Error> {
        let mut all_ranges = ram
            .iter()
            .map(|x| &x.range)
            .chain(&mmio)
            .chain(&vtl2_range)
            .copied()
            .collect::<Vec<_>>();

        all_ranges.sort();
        validate_ranges(&all_ranges)?;

        if all_ranges
            .iter()
            .zip(all_ranges.iter().skip(1))
            .any(|(x, y)| x.overlaps(y))
        {
            return Err(Error::BadMemoryRanges);
        }

        let last_ram_entry = ram.last().ok_or(Error::BadMemoryRanges)?;
        let end_of_ram = last_ram_entry.range.end();

        if let Some(range) = vtl2_range {
            if range.start() < end_of_ram {
                return Err(Error::Vtl2RangeBeforeEndOfRam);
            }
        }

        Ok(Self {
            ram,
            mmio,
            vtl2_range,
        })
    }

    /// The MMIO gaps.
    pub fn mmio(&self) -> &[MemoryRange] {
        &self.mmio
    }

    /// The RAM ranges.
    pub fn ram(&self) -> &[MemoryRangeWithNode] {
        &self.ram
    }

    /// The memory range reserved for VTL2, if any. It is guaranteed to be
    /// above all RAM.
    pub fn vtl2_range(&self) -> Option<MemoryRange> {
        self.vtl2_range
    }

    /// The total RAM size in bytes.
    pub fn ram_size(&self) -> u64 {
        self.ram.iter().map(|r| r.range.len()).sum()
    }

    /// The ending address of the last RAM range.
    pub fn end_of_ram(&self) -> u64 {
        self.ram.last().expect("ram set").range.end()
    }

    /// The bytes of RAM in ranges that end below 4 GiB.
    pub fn ram_below_4gb(&self) -> u64 {
        self.ram
            .iter()
            .filter(|r| r.range.end() < FOUR_GB)
            .map(|r| r.range.len())
            .sum()
    }

    /// The bytes of RAM in ranges that end at or above 4 GiB.
    pub fn ram_above_4gb(&self) -> u64 {
        self.ram
            .iter()
            .filter(|r| r.range.end() >= FOUR_GB)
            .map(|r| r.range.len())
            .sum()
    }

    /// The bytes of RAM at or above the end of the second (high) MMIO gap.
    ///
    /// Returns `None` unless there are exactly two MMIO gaps.
    pub fn ram_above_high_mmio(&self) -> Option<u64> {
        if self.mmio.len() != 2 {
            return None;
        }

        Some(
            self.ram
                .iter()
                .filter(|r| r.range.start() >= self.mmio[1].end())
                .map(|r| r.range.len())
                .sum(),
        )
    }

    /// The ending address of the last RAM range that ends below 4 GiB, or
    /// `None` if there is no such range.
    pub fn max_ram_below_4gb(&self) -> Option<u64> {
        Some(
            self.ram
                .iter()
                .rev()
                .find(|r| r.range.end() < FOUR_GB)?
                .range
                .end(),
        )
    }

    /// The ending address of the last RAM or MMIO range, whichever is higher.
    pub fn end_of_ram_or_mmio(&self) -> u64 {
        std::cmp::max(self.mmio.last().expect("mmio set").end(), self.end_of_ram())
    }

    /// Probes whether `address` falls within a RAM or MMIO range.
    ///
    /// Returns the [`AddressType`] of the containing range, or `None` if the
    /// address is in neither.
    pub fn probe_address(&self, address: u64) -> Option<AddressType> {
        let ranges = self
            .ram
            .iter()
            .map(|r| (&r.range, AddressType::Ram))
            .chain(self.mmio.iter().map(|r| (r, AddressType::Mmio)));

        for (range, address_type) in ranges {
            if range.contains_addr(address) {
                return Some(address_type);
            }
        }

        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;
    const TB: u64 = 1024 * GB;

    #[test]
    fn layout() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        let ram = &[
            MemoryRangeWithNode {
                range: MemoryRange::new(0..GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(2 * GB..3 * GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(4 * GB..TB + 2 * GB),
                vnode: 0,
            },
        ];

        let layout = MemoryLayout::new(TB, mmio, None).unwrap();
        assert_eq!(
            layout.ram(),
            &[
                MemoryRangeWithNode {
                    range: MemoryRange::new(0..GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(2 * GB..3 * GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(4 * GB..TB + 2 * GB),
                    vnode: 0
                },
            ]
        );
        assert_eq!(layout.mmio(), mmio);
        assert_eq!(layout.ram_size(), TB);
        assert_eq!(layout.end_of_ram(), TB + 2 * GB);

        let layout = MemoryLayout::new_from_ranges(ram, mmio).unwrap();
        assert_eq!(
            layout.ram(),
            &[
                MemoryRangeWithNode {
                    range: MemoryRange::new(0..GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(2 * GB..3 * GB),
                    vnode: 0
                },
                MemoryRangeWithNode {
                    range: MemoryRange::new(4 * GB..TB + 2 * GB),
                    vnode: 0
                },
            ]
        );
        assert_eq!(layout.mmio(), mmio);
        assert_eq!(layout.ram_size(), TB);
        assert_eq!(layout.end_of_ram(), TB + 2 * GB);
    }
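
    // A sketch exercising the 4 GiB accounting helpers, using the same layout
    // as `layout` above (RAM 0..1GiB, 2GiB..3GiB, 4GiB..TB+2GiB). The expected
    // values follow directly from the accessor definitions: `ram_below_4gb`
    // counts ranges ending below 4 GiB, and `ram_above_high_mmio` counts
    // ranges starting at or after the end of the second MMIO gap.
    #[test]
    fn ram_accounting() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        let layout = MemoryLayout::new(TB, mmio, None).unwrap();

        // RAM below 4 GiB: 0..1GiB and 2GiB..3GiB.
        assert_eq!(layout.ram_below_4gb(), 2 * GB);
        // RAM at or above 4 GiB: 4GiB..TB+2GiB.
        assert_eq!(layout.ram_above_4gb(), TB - 2 * GB);
        // The last range ending below 4 GiB is 2GiB..3GiB.
        assert_eq!(layout.max_ram_below_4gb(), Some(3 * GB));
        // The high MMIO gap ends at 4 GiB, so this matches ram_above_4gb here.
        assert_eq!(layout.ram_above_high_mmio(), Some(TB - 2 * GB));
        // RAM extends past the last MMIO gap.
        assert_eq!(layout.end_of_ram_or_mmio(), TB + 2 * GB);
    }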

    #[test]
    fn bad_layout() {
        MemoryLayout::new(TB + 1, &[], None).unwrap_err();
        let mmio = &[
            MemoryRange::new(3 * GB..4 * GB),
            MemoryRange::new(GB..2 * GB),
        ];
        MemoryLayout::new(TB, mmio, None).unwrap_err();

        MemoryLayout::new_from_ranges(&[], mmio).unwrap_err();

        let ram = &[MemoryRangeWithNode {
            range: MemoryRange::new(0..GB),
            vnode: 0,
        }];
        MemoryLayout::new_from_ranges(ram, mmio).unwrap_err();

        let ram = &[MemoryRangeWithNode {
            range: MemoryRange::new(0..GB + MB),
            vnode: 0,
        }];
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        MemoryLayout::new_from_ranges(ram, mmio).unwrap_err();
    }
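
    // A sketch of VTL2 range handling in `build`: a VTL2 range above the end
    // of RAM (TB + 2GiB for this layout) is accepted and reported by
    // `vtl2_range`, while one that collides with RAM fails validation.
    #[test]
    fn vtl2_layout() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];

        // RAM ends at TB + 2GiB, so a range starting there is valid.
        let vtl2 = MemoryRange::new(TB + 2 * GB..TB + 3 * GB);
        let layout = MemoryLayout::new(TB, mmio, Some(vtl2)).unwrap();
        assert_eq!(layout.vtl2_range(), Some(vtl2));
        assert_eq!(layout.ram_size(), TB);

        // A VTL2 range overlapping RAM is rejected.
        let vtl2 = MemoryRange::new(4 * GB..5 * GB);
        MemoryLayout::new(TB, mmio, Some(vtl2)).unwrap_err();
    }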

    #[test]
    fn probe_address() {
        let mmio = &[
            MemoryRange::new(GB..2 * GB),
            MemoryRange::new(3 * GB..4 * GB),
        ];
        let ram = &[
            MemoryRangeWithNode {
                range: MemoryRange::new(0..GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(2 * GB..3 * GB),
                vnode: 0,
            },
            MemoryRangeWithNode {
                range: MemoryRange::new(4 * GB..TB + 2 * GB),
                vnode: 0,
            },
        ];

        let layout = MemoryLayout::new_from_ranges(ram, mmio).unwrap();

        assert_eq!(layout.probe_address(0), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(256), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(2 * GB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(4 * GB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(TB), Some(AddressType::Ram));
        assert_eq!(layout.probe_address(TB + 1), Some(AddressType::Ram));

        assert_eq!(layout.probe_address(GB), Some(AddressType::Mmio));
        assert_eq!(layout.probe_address(GB + 123), Some(AddressType::Mmio));
        assert_eq!(layout.probe_address(3 * GB), Some(AddressType::Mmio));

        assert_eq!(layout.probe_address(TB + 2 * GB), None);
        assert_eq!(layout.probe_address(TB + 3 * GB), None);
        assert_eq!(layout.probe_address(4 * TB), None);
    }
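
    // A sketch of the Display impl for MemoryRangeWithNode: it appends the
    // vnode in parentheses after the range. The range text itself comes from
    // memory_range's Display impl, so only the vnode suffix is asserted here.
    #[test]
    fn display_range_with_node() {
        let range = MemoryRangeWithNode {
            range: MemoryRange::new(0..GB),
            vnode: 3,
        };
        assert!(range.to_string().ends_with("(3)"));
    }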
}