1use crate::host_params::MAX_VTL2_RAM_RANGES;
7use arrayvec::ArrayVec;
8use host_fdt_parser::MemoryEntry;
9#[cfg(test)]
10use igvm_defs::MemoryMapEntryType;
11use loader_defs::shim::MemoryVtlType;
12use memory_range::MemoryRange;
13use memory_range::RangeWalkResult;
14use memory_range::walk_ranges;
15use thiserror::Error;
16
/// Size of a 4K page, in bytes. Allocation lengths are rounded up to this.
const PAGE_SIZE_4K: u64 = 4096;

/// Maximum number of reserved memory ranges: a fixed set of singleton
/// ranges plus one per sidecar node.
pub const MAX_RESERVED_MEM_RANGES: usize = 6 + sidecar_defs::MAX_NODES;

/// Maximum number of distinct memory ranges tracked: all VTL2 ram ranges
/// plus all reserved ranges.
const MAX_MEMORY_RANGES: usize = MAX_VTL2_RAM_RANGES + MAX_RESERVED_MEM_RANGES;

/// Capacity of the address-space list. The factor of two presumably leaves
/// headroom for ranges being split by allocations — TODO confirm the bound.
const MAX_ADDRESS_RANGES: usize = MAX_MEMORY_RANGES * 2;
28
/// The purpose of a reserved VTL2 memory range. Each variant maps to a
/// distinct [`MemoryVtlType`] when reported (see the `From` impl below).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ReservedMemoryType {
    /// VTL2 config parameter range.
    Vtl2Config,
    /// Generic reserved VTL2 range.
    Vtl2Reserved,
    /// Range holding the sidecar kernel image.
    SidecarImage,
    /// Per-node sidecar range.
    SidecarNode,
    /// Range donated to the VTL2 GPA pool.
    Vtl2GpaPool,
    /// TDX page-table range.
    TdxPageTables,
    /// Bootshim log buffer range.
    BootshimLogBuffer,
    /// Header page of the persisted state region.
    PersistedStateHeader,
    /// Payload (protobuf) portion of the persisted state region.
    PersistedStatePayload,
}
53
54impl From<ReservedMemoryType> for MemoryVtlType {
55 fn from(r: ReservedMemoryType) -> Self {
56 match r {
57 ReservedMemoryType::Vtl2Config => MemoryVtlType::VTL2_CONFIG,
58 ReservedMemoryType::SidecarImage => MemoryVtlType::VTL2_SIDECAR_IMAGE,
59 ReservedMemoryType::SidecarNode => MemoryVtlType::VTL2_SIDECAR_NODE,
60 ReservedMemoryType::Vtl2Reserved => MemoryVtlType::VTL2_RESERVED,
61 ReservedMemoryType::Vtl2GpaPool => MemoryVtlType::VTL2_GPA_POOL,
62 ReservedMemoryType::TdxPageTables => MemoryVtlType::VTL2_TDX_PAGE_TABLES,
63 ReservedMemoryType::BootshimLogBuffer => MemoryVtlType::VTL2_BOOTSHIM_LOG_BUFFER,
64 ReservedMemoryType::PersistedStateHeader => MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
65 ReservedMemoryType::PersistedStatePayload => {
66 MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF
67 }
68 }
69 }
70}
71
/// How a tracked address range is currently being used.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum AddressUsage {
    /// Free and available for allocation.
    Free,
    /// Used by the bootshim (reported as plain VTL2 ram).
    Used,
    /// Reserved for a specific purpose.
    Reserved(ReservedMemoryType),
}
81
/// A single tracked range of the VTL2 address space.
#[derive(Debug)]
struct AddressRange {
    // The covered range.
    range: MemoryRange,
    // Numa node the range belongs to.
    vnode: u32,
    // Current usage state of the range.
    usage: AddressUsage,
}
88
89impl From<AddressUsage> for MemoryVtlType {
90 fn from(usage: AddressUsage) -> Self {
91 match usage {
92 AddressUsage::Free => MemoryVtlType::VTL2_RAM,
93 AddressUsage::Used => MemoryVtlType::VTL2_RAM,
94 AddressUsage::Reserved(r) => r.into(),
95 }
96 }
97}
98
/// A range returned by [`AddressSpaceManager::allocate`].
#[derive(Debug, Clone, Copy)]
pub struct AllocatedRange {
    /// The allocated range.
    pub range: MemoryRange,
    /// Numa node the range was allocated from.
    pub vnode: u32,
}
104
/// Errors returned when initializing the address space manager.
#[derive(Debug, Error)]
pub enum Error {
    /// More VTL2 ram ranges were supplied than the manager can track.
    #[error("ram len {len} greater than maximum {max}")]
    RamLen { len: u64, max: u64 },
    /// The manager was already initialized by a previous builder.
    #[error("already initialized")]
    AlreadyInitialized,
    /// A reserved range does not fall within the bootshim-used range.
    #[error(
        "reserved range {reserved:#x?}, type {typ:?} outside of bootshim used {bootshim_used:#x?}"
    )]
    ReservedRangeOutsideBootshimUsed {
        reserved: MemoryRange,
        typ: ReservedMemoryType,
        bootshim_used: MemoryRange,
    },
}
120
/// Manager of the VTL2 address space, tracking which ranges are free, used,
/// or reserved.
#[derive(Debug)]
pub struct AddressSpaceManager {
    // Tracked ranges covering VTL2 ram, kept in ascending address order.
    address_space: ArrayVec<AddressRange, MAX_ADDRESS_RANGES>,
    // True once any VTL2 GPA pool range has been registered or allocated.
    vtl2_pool: bool,
}
129
/// Builder used to initialize an [`AddressSpaceManager`].
pub struct AddressSpaceManagerBuilder<'a, I: Iterator<Item = MemoryRange>> {
    // The manager to initialize; returned by `init`.
    manager: &'a mut AddressSpaceManager,
    // Ram owned by VTL2, with numa affinity.
    vtl2_ram: &'a [MemoryEntry],
    // The range already in use by the bootshim.
    bootshim_used: MemoryRange,
    // Range holding persisted state; split into a header page plus payload.
    persisted_state_region: MemoryRange,
    // Iterator of VTL2 config parameter ranges.
    vtl2_config: I,
    // Optional reserved ranges, set via the `with_*` methods below.
    reserved_range: Option<MemoryRange>,
    sidecar_image: Option<MemoryRange>,
    page_tables: Option<MemoryRange>,
    log_buffer: Option<MemoryRange>,
    pool_range: Option<MemoryRange>,
}
143
impl<'a, I: Iterator<Item = MemoryRange>> AddressSpaceManagerBuilder<'a, I> {
    /// Creates a new builder for initializing `manager`.
    ///
    /// `vtl2_ram` lists the ram ranges owned by VTL2 with their numa nodes.
    /// `bootshim_used` is the range already consumed by the bootshim; all
    /// reserved ranges except the pool range must fall within it, or `init`
    /// fails. `persisted_state_region` is split into a 4K header page plus
    /// payload. `vtl2_config` yields the VTL2 config parameter ranges.
    pub fn new(
        manager: &'a mut AddressSpaceManager,
        vtl2_ram: &'a [MemoryEntry],
        bootshim_used: MemoryRange,
        persisted_state_region: MemoryRange,
        vtl2_config: I,
    ) -> AddressSpaceManagerBuilder<'a, I> {
        AddressSpaceManagerBuilder {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range: None,
            sidecar_image: None,
            page_tables: None,
            log_buffer: None,
            pool_range: None,
        }
    }

    /// Registers a generic reserved VTL2 range.
    pub fn with_reserved_range(mut self, reserved_range: MemoryRange) -> Self {
        self.reserved_range = Some(reserved_range);
        self
    }

    /// Registers the range holding the sidecar image.
    pub fn with_sidecar_image(mut self, sidecar_image: MemoryRange) -> Self {
        self.sidecar_image = Some(sidecar_image);
        self
    }

    /// Registers the TDX page-table range.
    pub fn with_page_tables(mut self, page_tables: MemoryRange) -> Self {
        self.page_tables = Some(page_tables);
        self
    }

    /// Registers the bootshim log buffer range.
    pub fn with_log_buffer(mut self, log_buffer: MemoryRange) -> Self {
        self.log_buffer = Some(log_buffer);
        self
    }

    /// Registers a pre-existing VTL2 GPA pool range. Unlike the other
    /// reserved ranges, this one need not lie within `bootshim_used`.
    pub fn with_pool_range(mut self, pool_range: MemoryRange) -> Self {
        self.pool_range = Some(pool_range);
        self
    }

    /// Consumes the builder and initializes the manager.
    ///
    /// Returns an error if there are too many ram ranges, the manager was
    /// already initialized, or any reserved range (other than the pool)
    /// falls outside `bootshim_used`. Panics if a used range is not
    /// contained in VTL2 ram.
    pub fn init(self) -> Result<&'a mut AddressSpaceManager, Error> {
        let Self {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range,
            sidecar_image,
            page_tables,
            log_buffer,
            pool_range,
        } = self;

        if vtl2_ram.len() > MAX_VTL2_RAM_RANGES {
            return Err(Error::RamLen {
                len: vtl2_ram.len() as u64,
                max: MAX_VTL2_RAM_RANGES as u64,
            });
        }

        if !manager.address_space.is_empty() {
            return Err(Error::AlreadyInitialized);
        }

        // The first 4K page of the persisted state region is the header; the
        // remainder is the protobuf payload.
        let (persisted_header, persisted_payload) =
            persisted_state_region.split_at_offset(PAGE_SIZE_4K);

        // Gather all reserved ranges, sorted by start address as required by
        // walk_ranges below.
        // NOTE(review): capacity 20 is a hard cap — `push`/`extend` panic if
        // vtl2_config yields enough ranges to exceed it; confirm the bound.
        let mut reserved: ArrayVec<(MemoryRange, ReservedMemoryType), 20> = ArrayVec::new();
        reserved.push((persisted_header, ReservedMemoryType::PersistedStateHeader));
        reserved.push((persisted_payload, ReservedMemoryType::PersistedStatePayload));
        reserved.extend(vtl2_config.map(|r| (r, ReservedMemoryType::Vtl2Config)));
        reserved.extend(
            reserved_range
                .into_iter()
                .map(|r| (r, ReservedMemoryType::Vtl2Reserved)),
        );
        reserved.extend(
            sidecar_image
                .into_iter()
                .map(|r| (r, ReservedMemoryType::SidecarImage)),
        );
        reserved.extend(
            page_tables
                .into_iter()
                .map(|r| (r, ReservedMemoryType::TdxPageTables)),
        );
        reserved.extend(
            log_buffer
                .into_iter()
                .map(|r| (r, ReservedMemoryType::BootshimLogBuffer)),
        );
        reserved.sort_unstable_by_key(|(r, _)| r.start());

        let mut used_ranges: ArrayVec<(MemoryRange, AddressUsage), 13> = ArrayVec::new();

        // Overlay the reserved ranges onto the bootshim-used range: portions
        // covered only by bootshim_used are plain Used, overlaps take the
        // reserved type, and any reserved range outside bootshim_used is an
        // error.
        for (entry, r) in walk_ranges(
            core::iter::once((bootshim_used, AddressUsage::Used)),
            reserved.iter().cloned(),
        ) {
            match r {
                RangeWalkResult::Left(_) => {
                    used_ranges.push((entry, AddressUsage::Used));
                }
                RangeWalkResult::Both(_, reserved_type) => {
                    used_ranges.push((entry, AddressUsage::Reserved(reserved_type)));
                }
                RangeWalkResult::Right(typ) => {
                    return Err(Error::ReservedRangeOutsideBootshimUsed {
                        reserved: entry,
                        typ,
                        bootshim_used,
                    });
                }
                RangeWalkResult::Neither => {}
            }
        }

        // The pool range lives outside bootshim_used, so add it separately
        // and re-sort to keep used_ranges in address order.
        if let Some(range) = pool_range {
            used_ranges.push((
                range,
                AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool),
            ));
            manager.vtl2_pool = true;
        }
        used_ranges.sort_unstable_by_key(|(r, _)| r.start());

        assert!(manager.address_space.is_empty());
        // Overlay used_ranges onto VTL2 ram to build the final address space:
        // ram not covered by any used range is Free; overlaps inherit the
        // usage; a used range outside ram is a bug.
        for (entry, r) in walk_ranges(
            vtl2_ram.iter().map(|e| (e.range, e.vnode)),
            used_ranges.iter().map(|(r, usage)| (*r, usage)),
        ) {
            match r {
                RangeWalkResult::Left(vnode) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: AddressUsage::Free,
                    });
                }
                RangeWalkResult::Both(vnode, usage) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: *usage,
                    });
                }
                RangeWalkResult::Right(usage) => {
                    panic!("vtl2 range {entry:#x?} used by {usage:?} not contained in vtl2 ram");
                }
                RangeWalkResult::Neither => {}
            }
        }

        Ok(manager)
    }
}
336
impl AddressSpaceManager {
    /// Creates an empty, uninitialized manager. `const` so it can be used in
    /// a static context; initialize via [`AddressSpaceManagerBuilder`].
    pub const fn new_const() -> Self {
        Self {
            address_space: ArrayVec::new_const(),
            vtl2_pool: false,
        }
    }

    /// Carves `len` bytes out of the free range at `index`, marking them
    /// with `usage` and re-inserting any leftover portion as a new free
    /// range.
    ///
    /// Panics (via asserts) if `usage` is `Free`, the range at `index` is
    /// not free, or it is smaller than `len`.
    fn allocate_range(
        &mut self,
        index: usize,
        len: u64,
        usage: AddressUsage,
        allocation_policy: AllocationPolicy,
    ) -> AllocatedRange {
        assert!(usage != AddressUsage::Free);
        let range = self.address_space.get_mut(index).expect("valid index");
        assert_eq!(range.usage, AddressUsage::Free);
        assert!(range.range.len() >= len);

        let (used, remainder) = match allocation_policy {
            AllocationPolicy::LowMemory => {
                // Take the low end; the remainder is the high part.
                range.range.split_at_offset(len)
            }
            AllocationPolicy::HighMemory => {
                // Take the high end; the remainder is the low part.
                let offset = range.range.len() - len;
                let (remainder, used) = range.range.split_at_offset(offset);
                (used, remainder)
            }
        };

        let remainder = if !remainder.is_empty() {
            Some(AddressRange {
                range: remainder,
                vnode: range.vnode,
                usage: AddressUsage::Free,
            })
        } else {
            None
        };

        // Repurpose the existing entry for the allocated portion.
        range.usage = usage;
        range.range = used;
        let allocated = AllocatedRange {
            range: used,
            vnode: range.vnode,
        };

        if let Some(remainder) = remainder {
            match allocation_policy {
                AllocationPolicy::LowMemory => {
                    // Remainder is above the allocation: insert after it.
                    self.address_space.insert(index + 1, remainder);
                }
                AllocationPolicy::HighMemory => {
                    // Remainder is below the allocation: insert before it.
                    self.address_space.insert(index, remainder);
                }
            }
        }

        allocated
    }

    /// Allocates `len` bytes (rounded up to a 4K page multiple), optionally
    /// restricted to ranges on `required_vnode`, from the lowest or highest
    /// suitable free range per `allocation_policy`.
    ///
    /// Returns `None` if `len` is zero or no free range satisfies the
    /// request. A successful `GpaPool` allocation also sets the pool flag
    /// reported by [`Self::has_vtl2_pool`].
    pub fn allocate(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
    ) -> Option<AllocatedRange> {
        if len == 0 {
            return None;
        }

        // Round up to a whole number of 4K pages.
        let len = len.div_ceil(PAGE_SIZE_4K) * PAGE_SIZE_4K;

        // Finds the first free range large enough; despite the name
        // `preferred_vnode`, a vnode filter is a hard requirement here.
        fn find_index<'a>(
            mut iter: impl Iterator<Item = (usize, &'a AddressRange)>,
            preferred_vnode: Option<u32>,
            len: u64,
        ) -> Option<usize> {
            iter.find_map(|(index, range)| {
                if range.usage == AddressUsage::Free
                    && range.range.len() >= len
                    && preferred_vnode.map(|pv| pv == range.vnode).unwrap_or(true)
                {
                    Some(index)
                } else {
                    None
                }
            })
        }

        let index = {
            let iter = self.address_space.iter().enumerate();
            match allocation_policy {
                AllocationPolicy::LowMemory => find_index(iter, required_vnode, len),
                AllocationPolicy::HighMemory => find_index(iter.rev(), required_vnode, len),
            }
        };

        let alloc = index.map(|index| {
            self.allocate_range(
                index,
                len,
                match allocation_type {
                    AllocationType::GpaPool => {
                        AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool)
                    }
                    AllocationType::SidecarNode => {
                        AddressUsage::Reserved(ReservedMemoryType::SidecarNode)
                    }
                },
                allocation_policy,
            )
        });

        if allocation_type == AllocationType::GpaPool && alloc.is_some() {
            self.vtl2_pool = true;
        }

        alloc
    }

    /// Returns all VTL2 ranges with their reported [`MemoryVtlType`],
    /// merging adjacent ranges of the same type.
    pub fn vtl2_ranges(&self) -> impl Iterator<Item = (MemoryRange, MemoryVtlType)> + use<'_> {
        memory_range::merge_adjacent_ranges(
            self.address_space.iter().map(|r| (r.range, r.usage.into())),
        )
    }

    /// Returns only the reserved ranges, with their reserved type.
    pub fn reserved_vtl2_ranges(
        &self,
    ) -> impl Iterator<Item = (MemoryRange, ReservedMemoryType)> + use<'_> {
        self.address_space.iter().filter_map(|r| match r.usage {
            AddressUsage::Reserved(typ) => Some((r.range, typ)),
            _ => None,
        })
    }

    /// Returns true if any VTL2 GPA pool range has been registered or
    /// allocated.
    pub fn has_vtl2_pool(&self) -> bool {
        self.vtl2_pool
    }
}
504
/// The kind of allocation to perform, which determines the reserved type of
/// the resulting range.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocationType {
    /// Allocate for the VTL2 GPA pool.
    GpaPool,
    /// Allocate for a sidecar node.
    SidecarNode,
}
510
/// Which end of the free address space to allocate from.
///
/// Derives match the sibling [`AllocationType`] enum for consistency;
/// `Copy` is safe since the variants carry no data.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocationPolicy {
    /// Allocate from the lowest suitable free range.
    LowMemory,
    /// Allocate from the highest suitable free range.
    #[allow(dead_code)]
    HighMemory,
}
519
#[cfg(test)]
mod tests {
    use super::*;

    /// Basic low/high allocation from a single-node ram range.
    #[test]
    fn test_allocate() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        // High allocations come from the top of ram, moving downward.
        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert!(address_space.has_vtl2_pool());

        let range = address_space
            .allocate(
                None,
                0x2000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));

        // Low allocations come from just above the bootshim-used range.
        let range = address_space
            .allocate(
                None,
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0xF000..0x12000));

        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x12000..0x13000));
    }

    /// Vnode-restricted allocation across a four-node ram layout.
    #[test]
    fn test_allocate_numa() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[
            MemoryEntry {
                range: MemoryRange::new(0x0..0x20000),
                vnode: 0,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x20000..0x40000),
                vnode: 1,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x40000..0x60000),
                vnode: 2,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x60000..0x80000),
                vnode: 3,
                mem_type: MemoryMapEntryType::MEMORY,
            },
        ];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0x10000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        // High allocation restricted to node 0 comes from the top of node
        // 0's range, not the top of all ram.
        let range = address_space
            .allocate(
                Some(0),
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(0),
                0x2000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(2),
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x5D000..0x60000));
        assert_eq!(range.vnode, 2);

        // Consume all of node 3, then verify further node-3 requests fail.
        let range = address_space
            .allocate(
                Some(3),
                0x20000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x60000..0x80000));
        assert_eq!(range.vnode, 3);

        let range = address_space.allocate(
            Some(3),
            0x1000,
            AllocationType::SidecarNode,
            AllocationPolicy::HighMemory,
        );
        assert!(
            range.is_none(),
            "allocation should fail, no space left for node 3"
        );
    }

    /// Requests that are not page multiples round up; zero-length fails.
    #[test]
    fn test_unaligned_allocations() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        // 0x1001 bytes rounds up to two pages.
        let range = address_space
            .allocate(
                None,
                0x1001,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1E000..0x20000));

        // 0xFFF bytes rounds up to one page.
        let range = address_space
            .allocate(
                None,
                0xFFF,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1E000));

        let range = address_space.allocate(
            None,
            0,
            AllocationType::GpaPool,
            AllocationPolicy::HighMemory,
        );
        assert!(range.is_none());
    }

    /// Reserved/config ranges outside bootshim_used must fail init.
    #[test]
    fn test_invalid_init_ranges() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        let mut address_space = AddressSpaceManager::new_const();

        // Config range entirely outside bootshim_used.
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0x10000..0x11000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // Config range straddling the end of bootshim_used.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0xE000..0x10000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // Persisted state region outside bootshim_used.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x10000..0x14000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));
    }

    /// The persisted state region splits into a header page plus payload and
    /// is reported with the expected VTL types.
    #[test]
    fn test_persisted_range() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xE000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xE000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }

        // A shorter persisted region leaves a gap of plain ram before the
        // config range.
        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xA000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xA000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xA000..0xE000), MemoryVtlType::VTL2_RAM),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }
    }
}