use crate::host_params::MAX_VTL2_RAM_RANGES;
use arrayvec::ArrayVec;
use host_fdt_parser::MemoryEntry;
#[cfg(test)]
use igvm_defs::MemoryMapEntryType;
use loader_defs::shim::MemoryVtlType;
use memory_range::MemoryRange;
use memory_range::RangeWalkResult;
use memory_range::walk_ranges;
use thiserror::Error;

const PAGE_SIZE_4K: u64 = 4096;

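/// The maximum number of reserved memory ranges that may be tracked: a fixed
/// set of reserved ranges plus one sidecar range per node.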
pub const MAX_RESERVED_MEM_RANGES: usize = 6 + sidecar_defs::MAX_NODES;

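/// The maximum number of memory ranges the manager may track: the VTL2 ram
/// ranges plus the reserved ranges.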
const MAX_MEMORY_RANGES: usize = MAX_VTL2_RAM_RANGES + MAX_RESERVED_MEM_RANGES;

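/// The maximum number of address space entries, with headroom for free ranges
/// that get split by allocations.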
const MAX_ADDRESS_RANGES: usize = MAX_MEMORY_RANGES * 2;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ReservedMemoryType {
    /// VTL2 config ranges.
    Vtl2Config,
    /// The VTL2 reserved range.
    Vtl2Reserved,
    /// The sidecar image.
    SidecarImage,
    /// Memory for a sidecar node.
    SidecarNode,
    /// The VTL2 GPA pool.
    Vtl2GpaPool,
    /// Page tables used for TDX.
    TdxPageTables,
    /// The bootshim log buffer.
    BootshimLogBuffer,
    /// The header for VTL2 persisted state.
    PersistedStateHeader,
    /// The payload for VTL2 persisted state.
    PersistedStatePayload,
}

impl From<ReservedMemoryType> for MemoryVtlType {
    fn from(r: ReservedMemoryType) -> Self {
        match r {
            ReservedMemoryType::Vtl2Config => MemoryVtlType::VTL2_CONFIG,
            ReservedMemoryType::SidecarImage => MemoryVtlType::VTL2_SIDECAR_IMAGE,
            ReservedMemoryType::SidecarNode => MemoryVtlType::VTL2_SIDECAR_NODE,
            ReservedMemoryType::Vtl2Reserved => MemoryVtlType::VTL2_RESERVED,
            ReservedMemoryType::Vtl2GpaPool => MemoryVtlType::VTL2_GPA_POOL,
            ReservedMemoryType::TdxPageTables => MemoryVtlType::VTL2_TDX_PAGE_TABLES,
            ReservedMemoryType::BootshimLogBuffer => MemoryVtlType::VTL2_BOOTSHIM_LOG_BUFFER,
            ReservedMemoryType::PersistedStateHeader => MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ReservedMemoryType::PersistedStatePayload => {
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF
            }
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum AddressUsage {
    /// The range is free, available for allocation.
    Free,
    /// The range is in use, reported as regular VTL2 ram.
    Used,
    /// The range is reserved for the given purpose.
    Reserved(ReservedMemoryType),
}

#[derive(Debug)]
struct AddressRange {
    range: MemoryRange,
    vnode: u32,
    usage: AddressUsage,
}

impl From<AddressUsage> for MemoryVtlType {
    fn from(usage: AddressUsage) -> Self {
        match usage {
            AddressUsage::Free => MemoryVtlType::VTL2_RAM,
            AddressUsage::Used => MemoryVtlType::VTL2_RAM,
            AddressUsage::Reserved(r) => r.into(),
        }
    }
}

/// A range allocated from the address space, along with its vnode.
#[derive(Debug, Clone, Copy)]
pub struct AllocatedRange {
    pub range: MemoryRange,
    pub vnode: u32,
}

#[derive(Debug, Error)]
pub enum Error {
    #[error("ram len {len} greater than maximum {max}")]
    RamLen { len: u64, max: u64 },
    #[error("already initialized")]
    AlreadyInitialized,
    #[error(
        "reserved range {reserved:#x?}, type {typ:?} outside of bootshim used {bootshim_used:#x?}"
    )]
    ReservedRangeOutsideBootshimUsed {
        reserved: MemoryRange,
        typ: ReservedMemoryType,
        bootshim_used: MemoryRange,
    },
}

#[derive(Debug)]
pub struct AddressSpaceManager {
    /// The VTL2 address space, sorted by range start.
    address_space: ArrayVec<AddressRange, MAX_ADDRESS_RANGES>,
    /// Whether a VTL2 GPA pool range has been set aside.
    vtl2_pool: bool,
}

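/// Builder for initializing an [`AddressSpaceManager`] from the VTL2 ram map,
/// the range already used by the bootshim, the persisted state region, and
/// the VTL2 config ranges, along with any optional reserved ranges.
///
/// A minimal usage sketch (illustrative ranges and bindings only):
///
/// ```ignore
/// let mut manager = AddressSpaceManager::new_const();
/// let manager = AddressSpaceManagerBuilder::new(
///     &mut manager,
///     vtl2_ram,
///     bootshim_used,
///     persisted_state_region,
///     vtl2_config_ranges,
/// )
/// .with_reserved_range(reserved)
/// .init()?;
/// ```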
pub struct AddressSpaceManagerBuilder<'a, I: Iterator<Item = MemoryRange>> {
    manager: &'a mut AddressSpaceManager,
    vtl2_ram: &'a [MemoryEntry],
    bootshim_used: MemoryRange,
    persisted_state_region: MemoryRange,
    vtl2_config: I,
    reserved_range: Option<MemoryRange>,
    sidecar_image: Option<MemoryRange>,
    page_tables: Option<MemoryRange>,
    log_buffer: Option<MemoryRange>,
    pool_range: Option<MemoryRange>,
}

impl<'a, I: Iterator<Item = MemoryRange>> AddressSpaceManagerBuilder<'a, I> {
    /// Creates a new builder from the ranges every configuration requires:
    /// the VTL2 ram map, the range already used by the bootshim, the
    /// persisted state region, and the VTL2 config ranges.
    pub fn new(
        manager: &'a mut AddressSpaceManager,
        vtl2_ram: &'a [MemoryEntry],
        bootshim_used: MemoryRange,
        persisted_state_region: MemoryRange,
        vtl2_config: I,
    ) -> AddressSpaceManagerBuilder<'a, I> {
        AddressSpaceManagerBuilder {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range: None,
            sidecar_image: None,
            page_tables: None,
            log_buffer: None,
            pool_range: None,
        }
    }

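    /// Sets the optional VTL2 reserved range to mark as reserved.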
    pub fn with_reserved_range(mut self, reserved_range: MemoryRange) -> Self {
        self.reserved_range = Some(reserved_range);
        self
    }

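    /// Sets the optional sidecar image range to mark as reserved.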
    pub fn with_sidecar_image(mut self, sidecar_image: MemoryRange) -> Self {
        self.sidecar_image = Some(sidecar_image);
        self
    }

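    /// Sets the optional bootshim log buffer range to mark as reserved.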
    pub fn with_log_buffer(mut self, log_buffer: MemoryRange) -> Self {
        self.log_buffer = Some(log_buffer);
        self
    }

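    /// Sets an optional preexisting VTL2 GPA pool range to mark as reserved.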
    pub fn with_pool_range(mut self, pool_range: MemoryRange) -> Self {
        self.pool_range = Some(pool_range);
        self
    }

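    /// Consumes the builder and initializes the [`AddressSpaceManager`].
    ///
    /// Fails if there are more VTL2 ram ranges than supported, if the manager
    /// was already initialized, or if any reserved range falls outside of the
    /// range used by the bootshim.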
    pub fn init(self) -> Result<&'a mut AddressSpaceManager, Error> {
        let Self {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range,
            sidecar_image,
            page_tables,
            log_buffer,
            pool_range,
        } = self;

        if vtl2_ram.len() > MAX_VTL2_RAM_RANGES {
            return Err(Error::RamLen {
                len: vtl2_ram.len() as u64,
                max: MAX_VTL2_RAM_RANGES as u64,
            });
        }

        if !manager.address_space.is_empty() {
            return Err(Error::AlreadyInitialized);
        }

        // The first 4K page of the persisted state region holds the header,
        // with the remainder used as the payload.
        let (persisted_header, persisted_payload) =
            persisted_state_region.split_at_offset(PAGE_SIZE_4K);

        let mut reserved: ArrayVec<(MemoryRange, ReservedMemoryType), 20> = ArrayVec::new();
        reserved.push((persisted_header, ReservedMemoryType::PersistedStateHeader));
        reserved.push((persisted_payload, ReservedMemoryType::PersistedStatePayload));
        reserved.extend(vtl2_config.map(|r| (r, ReservedMemoryType::Vtl2Config)));
        reserved.extend(
            reserved_range
                .into_iter()
                .map(|r| (r, ReservedMemoryType::Vtl2Reserved)),
        );
        reserved.extend(
            sidecar_image
                .into_iter()
                .map(|r| (r, ReservedMemoryType::SidecarImage)),
        );
        reserved.extend(
            page_tables
                .into_iter()
                .map(|r| (r, ReservedMemoryType::TdxPageTables)),
        );
        reserved.extend(
            log_buffer
                .into_iter()
                .map(|r| (r, ReservedMemoryType::BootshimLogBuffer)),
        );
        reserved.sort_unstable_by_key(|(r, _)| r.start());

        let mut used_ranges: ArrayVec<(MemoryRange, AddressUsage), 13> = ArrayVec::new();

        // Split the bootshim used range against the reserved ranges, which
        // must all fall within it.
        for (entry, r) in walk_ranges(
            core::iter::once((bootshim_used, AddressUsage::Used)),
            reserved.iter().cloned(),
        ) {
            match r {
                RangeWalkResult::Left(_) => {
                    used_ranges.push((entry, AddressUsage::Used));
                }
                RangeWalkResult::Both(_, reserved_type) => {
                    used_ranges.push((entry, AddressUsage::Reserved(reserved_type)));
                }
                RangeWalkResult::Right(typ) => {
                    return Err(Error::ReservedRangeOutsideBootshimUsed {
                        reserved: entry,
                        typ,
                        bootshim_used,
                    });
                }
                RangeWalkResult::Neither => {}
            }
        }

        // If a preexisting VTL2 GPA pool range was provided, mark it as
        // reserved.
        if let Some(range) = pool_range {
            used_ranges.push((
                range,
                AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool),
            ));
            manager.vtl2_pool = true;
        }
        used_ranges.sort_unstable_by_key(|(r, _)| r.start());

        assert!(manager.address_space.is_empty());
        // Build the address space by splitting VTL2 ram against the used and
        // reserved ranges; anything not covered by a used range is free.
        for (entry, r) in walk_ranges(
            vtl2_ram.iter().map(|e| (e.range, e.vnode)),
            used_ranges.iter().map(|(r, usage)| (*r, usage)),
        ) {
            match r {
                RangeWalkResult::Left(vnode) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: AddressUsage::Free,
                    });
                }
                RangeWalkResult::Both(vnode, usage) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: *usage,
                    });
                }
                RangeWalkResult::Right(usage) => {
                    panic!("vtl2 range {entry:#x?} used by {usage:?} not contained in vtl2 ram");
                }
                RangeWalkResult::Neither => {}
            }
        }

        Ok(manager)
    }
}

impl AddressSpaceManager {
    /// Creates an empty manager, usable in const contexts. It must be
    /// initialized via [`AddressSpaceManagerBuilder`] before use.
    pub const fn new_const() -> Self {
        Self {
            address_space: ArrayVec::new_const(),
            vtl2_pool: false,
        }
    }

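    /// Allocates `len` bytes from the free range at `index`, honoring
    /// `allocation_policy` and the optional `alignment`. Any free space left
    /// over before or after the allocation is reinserted as free ranges.
    ///
    /// Panics if the range at `index` is not free or cannot fit `len`.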
    fn allocate_range(
        &mut self,
        index: usize,
        len: u64,
        usage: AddressUsage,
        allocation_policy: AllocationPolicy,
        alignment: Option<u64>,
    ) -> AllocatedRange {
        assert!(usage != AddressUsage::Free);
        let range = self.address_space.get_mut(index).expect("valid index");
        assert_eq!(range.usage, AddressUsage::Free);

        let subrange = if let Some(alignment) = alignment {
            range.range.aligned_subrange(alignment)
        } else {
            range.range
        };

        assert!(subrange.len() >= len);
        assert_ne!(subrange, MemoryRange::EMPTY);

        let used = match allocation_policy {
            AllocationPolicy::LowMemory => {
                // Allocate from the start of the subrange.
                let (used, _) = subrange.split_at_offset(len);
                used
            }
            AllocationPolicy::HighMemory => {
                // Allocate from the end of the subrange.
                let offset = subrange.len() - len;
                let (_, used) = subrange.split_at_offset(offset);
                used
            }
        };

        let left = MemoryRange::new(range.range.start()..used.start());
        let right = MemoryRange::new(used.end()..range.range.end());

        let to_address_range = |r: MemoryRange| -> Option<AddressRange> {
            if !r.is_empty() {
                Some(AddressRange {
                    range: r,
                    vnode: range.vnode,
                    usage: AddressUsage::Free,
                })
            } else {
                None
            }
        };

        let left = to_address_range(left);
        let right = to_address_range(right);

        // Mark the allocated portion, then reinsert any leftover free space
        // on either side.
        range.usage = usage;
        range.range = used;
        let allocated = AllocatedRange {
            range: used,
            vnode: range.vnode,
        };

        if let Some(right) = right {
            self.address_space.insert(index + 1, right);
        }

        if let Some(left) = left {
            self.address_space.insert(index, left);
        }

        allocated
    }

    fn allocate_inner(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
        alignment: Option<u64>,
    ) -> Option<AllocatedRange> {
        if len == 0 {
            return None;
        }

        // Round the length up to the next 4K page boundary.
        let len = len.div_ceil(PAGE_SIZE_4K) * PAGE_SIZE_4K;

        fn find_index<'a>(
            mut iter: impl Iterator<Item = (usize, &'a AddressRange)>,
            preferred_vnode: Option<u32>,
            len: u64,
            alignment: Option<u64>,
        ) -> Option<usize> {
            iter.find_map(|(index, range)| {
                let is_aligned = alignment.map_or(true, |alignment| {
                    range.range.aligned_subrange(alignment).len() >= len
                });
                if range.usage == AddressUsage::Free
                    && range.range.len() >= len
                    && preferred_vnode.map_or(true, |pv| pv == range.vnode)
                    && is_aligned
                {
                    Some(index)
                } else {
                    None
                }
            })
        }

        let index = {
            let iter = self.address_space.iter().enumerate();
            match allocation_policy {
                AllocationPolicy::LowMemory => find_index(iter, required_vnode, len, alignment),
                AllocationPolicy::HighMemory => {
                    find_index(iter.rev(), required_vnode, len, alignment)
                }
            }
        };

        let address_usage = match allocation_type {
            AllocationType::GpaPool => AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool),
            AllocationType::SidecarNode => AddressUsage::Reserved(ReservedMemoryType::SidecarNode),
            AllocationType::TdxPageTables => {
                AddressUsage::Reserved(ReservedMemoryType::TdxPageTables)
            }
        };

        let alloc = index.map(|index| {
            self.allocate_range(index, len, address_usage, allocation_policy, alignment)
        });

        if allocation_type == AllocationType::GpaPool && alloc.is_some() {
            self.vtl2_pool = true;
        }

        alloc
    }

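    /// Allocates memory with the given type and policy, with `len` rounded up
    /// to the next 4K page. If `required_vnode` is set, only free ranges on
    /// that vnode are considered. Returns `None` if no free range satisfies
    /// the request.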
    pub fn allocate(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
    ) -> Option<AllocatedRange> {
        self.allocate_inner(
            required_vnode,
            len,
            allocation_type,
            allocation_policy,
            None,
        )
    }

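    /// Like [`Self::allocate`], but the allocation is carved from a subrange
    /// of free memory aligned to `alignment`.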
    #[cfg_attr(all(target_arch = "aarch64", not(test)), expect(dead_code))]
    pub fn allocate_aligned(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
        alignment: u64,
    ) -> Option<AllocatedRange> {
        self.allocate_inner(
            required_vnode,
            len,
            allocation_type,
            allocation_policy,
            Some(alignment),
        )
    }

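    /// Returns all VTL2 ranges and how they are used, with adjacent ranges of
    /// the same [`MemoryVtlType`] merged.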
    pub fn vtl2_ranges(&self) -> impl Iterator<Item = (MemoryRange, MemoryVtlType)> + use<'_> {
        memory_range::merge_adjacent_ranges(
            self.address_space.iter().map(|r| (r.range, r.usage.into())),
        )
    }

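    /// Returns the ranges reserved for a specific purpose, along with their
    /// [`ReservedMemoryType`].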
    pub fn reserved_vtl2_ranges(
        &self,
    ) -> impl Iterator<Item = (MemoryRange, ReservedMemoryType)> + use<'_> {
        self.address_space.iter().filter_map(|r| match r.usage {
            AddressUsage::Reserved(typ) => Some((r.range, typ)),
            _ => None,
        })
    }

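    /// Returns the free ranges on the given `vnode`.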
    pub fn free_ranges(&self, vnode: u32) -> impl Iterator<Item = MemoryRange> + use<'_> {
        self.address_space.iter().filter_map(move |r| {
            if r.usage == AddressUsage::Free && r.vnode == vnode {
                Some(r.range)
            } else {
                None
            }
        })
    }

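    /// Returns true if a VTL2 GPA pool range was set aside.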
    pub fn has_vtl2_pool(&self) -> bool {
        self.vtl2_pool
    }
}

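/// The type of allocation, which determines the [`ReservedMemoryType`] used
/// to mark the allocated range.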
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocationType {
    GpaPool,
    SidecarNode,
    #[cfg_attr(target_arch = "aarch64", expect(dead_code))]
    TdxPageTables,
}

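/// The policy controlling which end of free memory an allocation is taken
/// from.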
pub enum AllocationPolicy {
    /// Allocate from the lowest free addresses first.
    LowMemory,
    /// Allocate from the highest free addresses first.
    HighMemory,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_allocate() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert!(address_space.has_vtl2_pool());

        let range = address_space
            .allocate(
                None,
                0x2000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));

        let range = address_space
            .allocate(
                None,
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0xF000..0x12000));

        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x12000..0x13000));

        let free_ranges: Vec<MemoryRange> = address_space.free_ranges(0).collect();
        assert_eq!(free_ranges, vec![MemoryRange::new(0x13000..0x1D000)]);
    }

    #[test]
    fn test_allocate_aligned() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let alignment = 4096 * 16;
        let range = address_space
            .allocate_aligned(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
                alignment,
            )
            .unwrap();

        assert_eq!(0, range.range.start() % alignment);

        let alignment = 4096 * 4;
        let range = address_space
            .allocate_aligned(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
                alignment,
            )
            .unwrap();

        assert_eq!(0, range.range.end() % alignment);
    }

    #[test]
    fn test_failed_alignment() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        // No free range can satisfy 2MB alignment, so the allocation fails.
        let alignment = 1024 * 1024 * 2;
        let range = address_space.allocate_aligned(
            None,
            0x1000,
            AllocationType::GpaPool,
            AllocationPolicy::LowMemory,
            alignment,
        );
        assert!(range.is_none());
    }

    #[test]
    fn test_allocate_numa() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[
            MemoryEntry {
                range: MemoryRange::new(0x0..0x20000),
                vnode: 0,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x20000..0x40000),
                vnode: 1,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x40000..0x60000),
                vnode: 2,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x60000..0x80000),
                vnode: 3,
                mem_type: MemoryMapEntryType::MEMORY,
            },
        ];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0x10000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let range = address_space
            .allocate(
                Some(0),
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(0),
                0x2000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(2),
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x5D000..0x60000));
        assert_eq!(range.vnode, 2);

        // Consume all of node 3's ram.
        let range = address_space
            .allocate(
                Some(3),
                0x20000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x60000..0x80000));
        assert_eq!(range.vnode, 3);

        let range = address_space.allocate(
            Some(3),
            0x1000,
            AllocationType::SidecarNode,
            AllocationPolicy::HighMemory,
        );
        assert!(
            range.is_none(),
            "allocation should fail, no space left for node 3"
        );
    }

    #[test]
    fn test_unaligned_allocations() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        // Unaligned lengths are rounded up to the next 4K page.
        let range = address_space
            .allocate(
                None,
                0x1001,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1E000..0x20000));

        let range = address_space
            .allocate(
                None,
                0xFFF,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1E000));

        // Zero-length allocations fail.
        let range = address_space.allocate(
            None,
            0,
            AllocationType::GpaPool,
            AllocationPolicy::HighMemory,
        );
        assert!(range.is_none());
    }

    #[test]
    fn test_invalid_init_ranges() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        let mut address_space = AddressSpaceManager::new_const();

        // A vtl2 config range entirely outside of bootshim_used must fail.
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0x10000..0x11000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // A vtl2 config range extending past the end of bootshim_used must
        // fail.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0xE000..0x10000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // A persisted state region outside of bootshim_used must fail.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x10000..0x14000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));
    }

    #[test]
    fn test_persisted_range() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xE000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xE000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }

        // A persisted state region that does not cover all of bootshim_used
        // leaves a gap reported as VTL2 ram.
        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xA000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xA000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xA000..0xE000), MemoryVtlType::VTL2_RAM),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }
    }
}