//! Address space management for the bootshim: tracks which ranges of VTL2 RAM
//! are free, used by the bootshim, or reserved, and serves new allocations.

use crate::host_params::MAX_VTL2_RAM_RANGES;
use arrayvec::ArrayVec;
use host_fdt_parser::MemoryEntry;
#[cfg(test)]
use igvm_defs::MemoryMapEntryType;
use loader_defs::shim::MemoryVtlType;
use memory_range::MemoryRange;
use memory_range::RangeWalkResult;
use memory_range::walk_ranges;
use thiserror::Error;

const PAGE_SIZE_4K: u64 = 4096;

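/// The maximum number of reserved memory ranges: a fixed set of reservations
/// plus one potential sidecar range per node.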
pub const MAX_RESERVED_MEM_RANGES: usize = 6 + sidecar_defs::MAX_NODES;

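/// The maximum number of memory ranges that may be tracked: every VTL2 RAM
/// range plus every reserved range.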
const MAX_MEMORY_RANGES: usize = MAX_VTL2_RAM_RANGES + MAX_RESERVED_MEM_RANGES;

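/// The maximum number of entries the address space map can hold. Allocations
/// may split an existing free range into multiple entries, so budget twice the
/// number of memory ranges.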
const MAX_ADDRESS_RANGES: usize = MAX_MEMORY_RANGES * 2;

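/// Types of memory reserved from VTL2 RAM. Each maps to a [`MemoryVtlType`]
/// used to report the range.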
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ReservedMemoryType {
    /// A VTL2 config range.
    Vtl2Config,
    /// The VTL2 reserved range.
    Vtl2Reserved,
    /// The sidecar kernel image.
    SidecarImage,
    /// A per-node sidecar reservation.
    SidecarNode,
    /// The VTL2 GPA pool, set aside for runtime allocations.
    Vtl2GpaPool,
    /// Page tables used on TDX.
    TdxPageTables,
    /// The bootshim log buffer.
    BootshimLogBuffer,
    /// The header page describing VTL2 persisted state.
    PersistedStateHeader,
    /// The protobuf payload of VTL2 persisted state.
    PersistedStatePayload,
}

impl From<ReservedMemoryType> for MemoryVtlType {
    fn from(r: ReservedMemoryType) -> Self {
        match r {
            ReservedMemoryType::Vtl2Config => MemoryVtlType::VTL2_CONFIG,
            ReservedMemoryType::SidecarImage => MemoryVtlType::VTL2_SIDECAR_IMAGE,
            ReservedMemoryType::SidecarNode => MemoryVtlType::VTL2_SIDECAR_NODE,
            ReservedMemoryType::Vtl2Reserved => MemoryVtlType::VTL2_RESERVED,
            ReservedMemoryType::Vtl2GpaPool => MemoryVtlType::VTL2_GPA_POOL,
            ReservedMemoryType::TdxPageTables => MemoryVtlType::VTL2_TDX_PAGE_TABLES,
            ReservedMemoryType::BootshimLogBuffer => MemoryVtlType::VTL2_BOOTSHIM_LOG_BUFFER,
            ReservedMemoryType::PersistedStateHeader => MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ReservedMemoryType::PersistedStatePayload => {
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF
            }
        }
    }
}

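/// How a tracked address range is currently used.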
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum AddressUsage {
    /// The range is free VTL2 RAM, available for allocation.
    Free,
    /// The range is in use by the bootshim.
    Used,
    /// The range is reserved for the given purpose.
    Reserved(ReservedMemoryType),
}

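/// A contiguous address range tracked by the [`AddressSpaceManager`], with its
/// NUMA node and usage.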
#[derive(Debug)]
struct AddressRange {
    range: MemoryRange,
    vnode: u32,
    usage: AddressUsage,
}

impl From<AddressUsage> for MemoryVtlType {
    fn from(usage: AddressUsage) -> Self {
        match usage {
            AddressUsage::Free => MemoryVtlType::VTL2_RAM,
            AddressUsage::Used => MemoryVtlType::VTL2_RAM,
            AddressUsage::Reserved(r) => r.into(),
        }
    }
}

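/// A range returned by the allocator, along with the NUMA node it was
/// allocated from.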
#[derive(Debug, Clone, Copy)]
pub struct AllocatedRange {
    pub range: MemoryRange,
    pub vnode: u32,
}

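/// Errors returned when initializing the [`AddressSpaceManager`].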
#[derive(Debug, Error)]
pub enum Error {
    #[error("ram len {len} greater than maximum {max}")]
    RamLen { len: u64, max: u64 },
    #[error("already initialized")]
    AlreadyInitialized,
    #[error(
        "reserved range {reserved:#x?}, type {typ:?} outside of bootshim used {bootshim_used:#x?}"
    )]
    ReservedRangeOutsideBootshimUsed {
        reserved: MemoryRange,
        typ: ReservedMemoryType,
        bootshim_used: MemoryRange,
    },
}

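/// Tracks the VTL2 address space: which portions of VTL2 RAM are free, used by
/// the bootshim, or reserved, and serves new allocations from the free ranges.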
#[derive(Debug)]
pub struct AddressSpaceManager {
    /// The tracked ranges, sorted by address and non-overlapping.
    address_space: ArrayVec<AddressRange, MAX_ADDRESS_RANGES>,

    /// Whether a VTL2 GPA pool range is being tracked.
    vtl2_pool: bool,
}

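/// Builder for initializing an [`AddressSpaceManager`] from the VTL2 RAM
/// layout and the ranges already consumed by the bootshim.
///
/// A minimal usage sketch (variable names are illustrative, not fixed API):
///
/// ```ignore
/// let mut manager = AddressSpaceManager::new_const();
/// let manager = AddressSpaceManagerBuilder::new(
///     &mut manager,
///     vtl2_ram,               // &[MemoryEntry] describing VTL2 RAM
///     bootshim_used,          // everything the bootshim has touched
///     persisted_state_region, // header page plus protobuf payload
///     config_ranges.iter().cloned(),
/// )
/// .with_sidecar_image(sidecar_image_range)
/// .init()?;
/// ```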
pub struct AddressSpaceManagerBuilder<'a, I: Iterator<Item = MemoryRange>> {
    manager: &'a mut AddressSpaceManager,
    vtl2_ram: &'a [MemoryEntry],
    bootshim_used: MemoryRange,
    persisted_state_region: MemoryRange,
    vtl2_config: I,
    reserved_range: Option<MemoryRange>,
    sidecar_image: Option<MemoryRange>,
    page_tables: Option<MemoryRange>,
    log_buffer: Option<MemoryRange>,
    pool_range: Option<MemoryRange>,
}

impl<'a, I: Iterator<Item = MemoryRange>> AddressSpaceManagerBuilder<'a, I> {
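    /// Creates a new builder for initializing an [`AddressSpaceManager`].
    ///
    /// `bootshim_used` must contain every range already consumed by the
    /// bootshim, including `persisted_state_region` and all `vtl2_config`
    /// ranges; [`Self::init`] fails otherwise.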
    pub fn new(
        manager: &'a mut AddressSpaceManager,
        vtl2_ram: &'a [MemoryEntry],
        bootshim_used: MemoryRange,
        persisted_state_region: MemoryRange,
        vtl2_config: I,
    ) -> AddressSpaceManagerBuilder<'a, I> {
        AddressSpaceManagerBuilder {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range: None,
            sidecar_image: None,
            page_tables: None,
            log_buffer: None,
            pool_range: None,
        }
    }

    /// Sets the VTL2 reserved range.
    pub fn with_reserved_range(mut self, reserved_range: MemoryRange) -> Self {
        self.reserved_range = Some(reserved_range);
        self
    }

    /// Sets the range holding the sidecar kernel image.
    pub fn with_sidecar_image(mut self, sidecar_image: MemoryRange) -> Self {
        self.sidecar_image = Some(sidecar_image);
        self
    }

    /// Sets the range used as the bootshim log buffer.
    pub fn with_log_buffer(mut self, log_buffer: MemoryRange) -> Self {
        self.log_buffer = Some(log_buffer);
        self
    }

    /// Sets a preexisting VTL2 GPA pool range.
    pub fn with_pool_range(mut self, pool_range: MemoryRange) -> Self {
        self.pool_range = Some(pool_range);
        self
    }

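    /// Initializes the address space manager by overlaying the bootshim-used
    /// and reserved ranges onto VTL2 RAM; everything left over is tracked as
    /// free.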
    pub fn init(self) -> Result<&'a mut AddressSpaceManager, Error> {
        let Self {
            manager,
            vtl2_ram,
            bootshim_used,
            persisted_state_region,
            vtl2_config,
            reserved_range,
            sidecar_image,
            page_tables,
            log_buffer,
            pool_range,
        } = self;

        if vtl2_ram.len() > MAX_VTL2_RAM_RANGES {
            return Err(Error::RamLen {
                len: vtl2_ram.len() as u64,
                max: MAX_VTL2_RAM_RANGES as u64,
            });
        }

        if !manager.address_space.is_empty() {
            return Err(Error::AlreadyInitialized);
        }

        // The first page of the persisted state region holds the header; the
        // remainder holds the protobuf payload.
        let (persisted_header, persisted_payload) =
            persisted_state_region.split_at_offset(PAGE_SIZE_4K);

        // Collect all reserved ranges, then sort them by base address.
        let mut reserved: ArrayVec<(MemoryRange, ReservedMemoryType), 20> = ArrayVec::new();
        reserved.push((persisted_header, ReservedMemoryType::PersistedStateHeader));
        reserved.push((persisted_payload, ReservedMemoryType::PersistedStatePayload));
        reserved.extend(vtl2_config.map(|r| (r, ReservedMemoryType::Vtl2Config)));
        reserved.extend(
            reserved_range
                .into_iter()
                .map(|r| (r, ReservedMemoryType::Vtl2Reserved)),
        );
        reserved.extend(
            sidecar_image
                .into_iter()
                .map(|r| (r, ReservedMemoryType::SidecarImage)),
        );
        reserved.extend(
            page_tables
                .into_iter()
                .map(|r| (r, ReservedMemoryType::TdxPageTables)),
        );
        reserved.extend(
            log_buffer
                .into_iter()
                .map(|r| (r, ReservedMemoryType::BootshimLogBuffer)),
        );
        reserved.sort_unstable_by_key(|(r, _)| r.start());

        let mut used_ranges: ArrayVec<(MemoryRange, AddressUsage), 13> = ArrayVec::new();

        // Overlay the reserved ranges onto the bootshim-used range. Every
        // reserved range must fall within the bootshim-used range.
        for (entry, r) in walk_ranges(
            core::iter::once((bootshim_used, AddressUsage::Used)),
            reserved.iter().cloned(),
        ) {
            match r {
                RangeWalkResult::Left(_) => {
                    used_ranges.push((entry, AddressUsage::Used));
                }
                RangeWalkResult::Both(_, reserved_type) => {
                    used_ranges.push((entry, AddressUsage::Reserved(reserved_type)));
                }
                RangeWalkResult::Right(typ) => {
                    return Err(Error::ReservedRangeOutsideBootshimUsed {
                        reserved: entry,
                        typ,
                        bootshim_used,
                    });
                }
                RangeWalkResult::Neither => {}
            }
        }

        // A preexisting VTL2 GPA pool is tracked as reserved, but is not
        // required to lie within the bootshim-used range.
        if let Some(range) = pool_range {
            used_ranges.push((
                range,
                AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool),
            ));
            manager.vtl2_pool = true;
        }
        used_ranges.sort_unstable_by_key(|(r, _)| r.start());

        // Overlay the used and reserved ranges onto VTL2 RAM; everything left
        // over is free.
        assert!(manager.address_space.is_empty());
        for (entry, r) in walk_ranges(
            vtl2_ram.iter().map(|e| (e.range, e.vnode)),
            used_ranges.iter().map(|(r, usage)| (*r, usage)),
        ) {
            match r {
                RangeWalkResult::Left(vnode) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: AddressUsage::Free,
                    });
                }
                RangeWalkResult::Both(vnode, usage) => {
                    manager.address_space.push(AddressRange {
                        range: entry,
                        vnode,
                        usage: *usage,
                    });
                }
                RangeWalkResult::Right(usage) => {
                    panic!("vtl2 range {entry:#x?} used by {usage:?} not contained in vtl2 ram");
                }
                RangeWalkResult::Neither => {}
            }
        }

        Ok(manager)
    }
}

impl AddressSpaceManager {
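    /// Creates an empty, uninitialized manager, suitable for use in statics.
    /// Initialize it with [`AddressSpaceManagerBuilder`] before use.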
    pub const fn new_const() -> Self {
        Self {
            address_space: ArrayVec::new_const(),
            vtl2_pool: false,
        }
    }

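    /// Allocates `len` bytes from the free range at `index`, placing the
    /// allocation at the low or high end per `allocation_policy`, optionally
    /// within a subrange aligned to `alignment`. Leftover space on either side
    /// is re-inserted as free ranges.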
    fn allocate_range(
        &mut self,
        index: usize,
        len: u64,
        usage: AddressUsage,
        allocation_policy: AllocationPolicy,
        alignment: Option<u64>,
    ) -> AllocatedRange {
        assert!(usage != AddressUsage::Free);
        let range = self.address_space.get_mut(index).expect("valid index");
        assert_eq!(range.usage, AddressUsage::Free);

        let subrange = if let Some(alignment) = alignment {
            range.range.aligned_subrange(alignment)
        } else {
            range.range
        };

        assert!(subrange.len() >= len);
        assert_ne!(subrange, MemoryRange::EMPTY);

        let used = match allocation_policy {
            AllocationPolicy::LowMemory => {
                // Carve the allocation from the bottom of the subrange.
                let (used, _) = subrange.split_at_offset(len);
                used
            }
            AllocationPolicy::HighMemory => {
                // Carve the allocation from the top of the subrange.
                let offset = subrange.len() - len;
                let (_, used) = subrange.split_at_offset(offset);
                used
            }
        };

        // Any free space left on either side of the allocation becomes its
        // own free range.
        let left = MemoryRange::new(range.range.start()..used.start());
        let right = MemoryRange::new(used.end()..range.range.end());

        let to_address_range = |r: MemoryRange| -> Option<AddressRange> {
            if !r.is_empty() {
                Some(AddressRange {
                    range: r,
                    vnode: range.vnode,
                    usage: AddressUsage::Free,
                })
            } else {
                None
            }
        };

        let left = to_address_range(left);
        let right = to_address_range(right);

        // Shrink the existing entry to cover only the allocated range.
        range.usage = usage;
        range.range = used;
        let allocated = AllocatedRange {
            range: used,
            vnode: range.vnode,
        };

        // Insert the right side first so that `index` remains valid for the
        // left-side insert.
        if let Some(right) = right {
            self.address_space.insert(index + 1, right);
        }

        if let Some(left) = left {
            self.address_space.insert(index, left);
        }

        allocated
    }

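    /// Common implementation for [`Self::allocate`] and
    /// [`Self::allocate_aligned`].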
    fn allocate_inner(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
        alignment: Option<u64>,
    ) -> Option<AllocatedRange> {
        if len == 0 {
            return None;
        }

        // Round up to the next 4K page multiple.
        let len = len.div_ceil(PAGE_SIZE_4K) * PAGE_SIZE_4K;

        fn find_index<'a>(
            mut iter: impl Iterator<Item = (usize, &'a AddressRange)>,
            preferred_vnode: Option<u32>,
            len: u64,
            alignment: Option<u64>,
        ) -> Option<usize> {
            iter.find_map(|(index, range)| {
                // With an alignment, the aligned subrange must still be large
                // enough to hold the allocation.
                let is_aligned = alignment
                    .is_none_or(|alignment| range.range.aligned_subrange(alignment).len() >= len);
                if range.usage == AddressUsage::Free
                    && range.range.len() >= len
                    && preferred_vnode.is_none_or(|pv| pv == range.vnode)
                    && is_aligned
                {
                    Some(index)
                } else {
                    None
                }
            })
        }

        // Search from the bottom or the top of the address space, depending
        // on the allocation policy.
        let index = {
            let iter = self.address_space.iter().enumerate();
            match allocation_policy {
                AllocationPolicy::LowMemory => find_index(iter, required_vnode, len, alignment),
                AllocationPolicy::HighMemory => {
                    find_index(iter.rev(), required_vnode, len, alignment)
                }
            }
        };

        let address_usage = match allocation_type {
            AllocationType::GpaPool => AddressUsage::Reserved(ReservedMemoryType::Vtl2GpaPool),
            AllocationType::SidecarNode => AddressUsage::Reserved(ReservedMemoryType::SidecarNode),
            AllocationType::TdxPageTables => {
                AddressUsage::Reserved(ReservedMemoryType::TdxPageTables)
            }
        };

        let alloc = index.map(|index| {
            self.allocate_range(index, len, address_usage, allocation_policy, alignment)
        });

        if allocation_type == AllocationType::GpaPool && alloc.is_some() {
            self.vtl2_pool = true;
        }

        alloc
    }

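    /// Allocates a new range of `len` bytes, rounded up to a multiple of 4K
    /// pages, with the given type and placement policy.
    ///
    /// If `required_vnode` is set, the allocation is only satisfied from free
    /// ranges on that NUMA node. Returns `None` if no suitable free range
    /// exists or if `len` is zero.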
    pub fn allocate(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
    ) -> Option<AllocatedRange> {
        self.allocate_inner(
            required_vnode,
            len,
            allocation_type,
            allocation_policy,
            None,
        )
    }

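    /// Like [`Self::allocate`], but carves the allocation out of a subrange of
    /// free memory aligned to `alignment`.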
    #[cfg_attr(all(target_arch = "aarch64", not(test)), expect(dead_code))]
    pub fn allocate_aligned(
        &mut self,
        required_vnode: Option<u32>,
        len: u64,
        allocation_type: AllocationType,
        allocation_policy: AllocationPolicy,
        alignment: u64,
    ) -> Option<AllocatedRange> {
        self.allocate_inner(
            required_vnode,
            len,
            allocation_type,
            allocation_policy,
            Some(alignment),
        )
    }

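    /// Returns an iterator over all VTL2 ranges and their [`MemoryVtlType`],
    /// with adjacent ranges of the same type merged.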
    pub fn vtl2_ranges(&self) -> impl Iterator<Item = (MemoryRange, MemoryVtlType)> + use<'_> {
        memory_range::merge_adjacent_ranges(
            self.address_space.iter().map(|r| (r.range, r.usage.into())),
        )
    }

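    /// Returns an iterator over only the reserved ranges and their
    /// [`ReservedMemoryType`].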
    pub fn reserved_vtl2_ranges(
        &self,
    ) -> impl Iterator<Item = (MemoryRange, ReservedMemoryType)> + use<'_> {
        self.address_space.iter().filter_map(|r| match r.usage {
            AddressUsage::Reserved(typ) => Some((r.range, typ)),
            _ => None,
        })
    }

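    /// Returns true if a VTL2 GPA pool range is being tracked.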
    pub fn has_vtl2_pool(&self) -> bool {
        self.vtl2_pool
    }
}

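/// The type of a runtime allocation, which determines the reserved memory
/// type used to report the range.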
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocationType {
    GpaPool,
    SidecarNode,
    #[cfg_attr(target_arch = "aarch64", expect(dead_code))]
    TdxPageTables,
}

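/// Placement policy for new allocations.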
pub enum AllocationPolicy {
    /// Allocate from the lowest free addresses first.
    LowMemory,
    /// Allocate from the highest free addresses first.
    HighMemory,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_allocate() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert!(address_space.has_vtl2_pool());

        let range = address_space
            .allocate(
                None,
                0x2000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));

        let range = address_space
            .allocate(
                None,
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0xF000..0x12000));

        let range = address_space
            .allocate(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x12000..0x13000));
    }

    #[test]
    fn test_allocate_aligned() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let alignment = 4096 * 16;
        let range = address_space
            .allocate_aligned(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::LowMemory,
                alignment,
            )
            .unwrap();

        assert_eq!(0, range.range.start() % alignment);

        let alignment = 4096 * 4;
        let range = address_space
            .allocate_aligned(
                None,
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
                alignment,
            )
            .unwrap();

        assert_eq!(0, range.range.end() % alignment);
    }

    #[test]
    fn test_failed_alignment() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let alignment = 1024 * 1024 * 2;
        let range = address_space.allocate_aligned(
            None,
            0x1000,
            AllocationType::GpaPool,
            AllocationPolicy::LowMemory,
            alignment,
        );
        assert!(range.is_none());
    }

    // Tests allocations that are constrained to a specific NUMA node.
    #[test]
    fn test_allocate_numa() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[
            MemoryEntry {
                range: MemoryRange::new(0x0..0x20000),
                vnode: 0,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x20000..0x40000),
                vnode: 1,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x40000..0x60000),
                vnode: 2,
                mem_type: MemoryMapEntryType::MEMORY,
            },
            MemoryEntry {
                range: MemoryRange::new(0x60000..0x80000),
                vnode: 3,
                mem_type: MemoryMapEntryType::MEMORY,
            },
        ];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0x10000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let range = address_space
            .allocate(
                Some(0),
                0x1000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1F000..0x20000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(0),
                0x2000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1F000));
        assert_eq!(range.vnode, 0);

        let range = address_space
            .allocate(
                Some(2),
                0x3000,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x5D000..0x60000));
        assert_eq!(range.vnode, 2);

        // Consume all remaining memory on node 3.
        let range = address_space
            .allocate(
                Some(3),
                0x20000,
                AllocationType::SidecarNode,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x60000..0x80000));
        assert_eq!(range.vnode, 3);

        let range = address_space.allocate(
            Some(3),
            0x1000,
            AllocationType::SidecarNode,
            AllocationPolicy::HighMemory,
        );
        assert!(
            range.is_none(),
            "allocation should fail, no space left for node 3"
        );
    }

    // Tests that lengths are rounded up to 4K page multiples and that
    // zero-length allocations fail.
    #[test]
    fn test_unaligned_allocations() {
        let mut address_space = AddressSpaceManager::new_const();
        let vtl2_ram = &[MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];

        AddressSpaceManagerBuilder::new(
            &mut address_space,
            vtl2_ram,
            MemoryRange::new(0x0..0xF000),
            MemoryRange::new(0x0..0x2000),
            [
                MemoryRange::new(0x3000..0x4000),
                MemoryRange::new(0x5000..0x6000),
            ]
            .iter()
            .cloned(),
        )
        .with_reserved_range(MemoryRange::new(0x8000..0xA000))
        .with_sidecar_image(MemoryRange::new(0xA000..0xC000))
        .init()
        .unwrap();

        let range = address_space
            .allocate(
                None,
                0x1001,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1E000..0x20000));

        let range = address_space
            .allocate(
                None,
                0xFFF,
                AllocationType::GpaPool,
                AllocationPolicy::HighMemory,
            )
            .unwrap();
        assert_eq!(range.range, MemoryRange::new(0x1D000..0x1E000));

        let range = address_space.allocate(
            None,
            0,
            AllocationType::GpaPool,
            AllocationPolicy::HighMemory,
        );
        assert!(range.is_none());
    }

    #[test]
    fn test_invalid_init_ranges() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        // Config range entirely outside of the bootshim-used range.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0x10000..0x11000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // Config range straddling the end of the bootshim-used range.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0x2000),
            [MemoryRange::new(0xE000..0x10000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));

        // Persisted state region outside of the bootshim-used range.
        let mut address_space = AddressSpaceManager::new_const();
        let result = AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x10000..0x14000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init();

        assert!(matches!(
            result,
            Err(Error::ReservedRangeOutsideBootshimUsed { .. })
        ));
    }

    #[test]
    fn test_persisted_range() {
        let vtl2_ram = [MemoryEntry {
            range: MemoryRange::new(0x0..0x20000),
            vnode: 0,
            mem_type: MemoryMapEntryType::MEMORY,
        }];
        let bootshim_used = MemoryRange::new(0x0..0xF000);

        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xE000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xE000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }

        // A persisted state region that does not extend to the config range
        // leaves the gap reported as plain VTL2 RAM.
        let mut address_space = AddressSpaceManager::new_const();
        AddressSpaceManagerBuilder::new(
            &mut address_space,
            &vtl2_ram,
            bootshim_used,
            MemoryRange::new(0x0..0xA000),
            [MemoryRange::new(0xE000..0xF000)].iter().cloned(),
        )
        .init()
        .unwrap();

        let expected = [
            (
                MemoryRange::new(0x0..0x1000),
                MemoryVtlType::VTL2_PERSISTED_STATE_HEADER,
            ),
            (
                MemoryRange::new(0x1000..0xA000),
                MemoryVtlType::VTL2_PERSISTED_STATE_PROTOBUF,
            ),
            (MemoryRange::new(0xA000..0xE000), MemoryVtlType::VTL2_RAM),
            (MemoryRange::new(0xE000..0xF000), MemoryVtlType::VTL2_CONFIG),
            (MemoryRange::new(0xF000..0x20000), MemoryVtlType::VTL2_RAM),
        ];

        for (expected, actual) in expected.iter().zip(address_space.vtl2_ranges()) {
            assert_eq!(*expected, actual);
        }
    }
}