//! Types and iterators for working with 4KB-page-aligned memory ranges.

#![forbid(unsafe_code)]
#![no_std]

use core::iter::Iterator;
use core::iter::Peekable;
use core::ops::Range;

const PAGE_SIZE: u64 = 4096;
const TWO_MB: u64 = 0x20_0000;
const ONE_GB: u64 = 0x4000_0000;

/// A page-aligned byte range of memory, `[start, end)`.
///
/// The start and end are always aligned to 4KB page boundaries, and the start
/// is never after the end.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(
    feature = "mesh",
    derive(mesh_protobuf::Protobuf),
    mesh(package = "topology")
)]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect), inspect(display))]
pub struct MemoryRange {
    #[cfg_attr(feature = "mesh", mesh(1))]
    start: u64,
    #[cfg_attr(feature = "mesh", mesh(2))]
    end: u64,
}

impl core::fmt::Display for MemoryRange {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:#x}-{:#x}", self.start(), self.end())
    }
}

impl TryFrom<Range<u64>> for MemoryRange {
    type Error = InvalidMemoryRange;

    fn try_from(range: Range<u64>) -> Result<Self, Self::Error> {
        Self::try_new(range)
    }
}

impl TryFrom<Range<usize>> for MemoryRange {
    type Error = InvalidMemoryRange;

    fn try_from(range: Range<usize>) -> Result<Self, Self::Error> {
        Self::try_new(range.start as u64..range.end as u64)
    }
}

/// Error returned by [`MemoryRange::try_new`] and the `TryFrom` conversions
/// when the range is not page-aligned or its start is after its end.
#[derive(Debug, thiserror::Error)]
#[error("unaligned or invalid memory range: {start:#x}-{end:#x}")]
pub struct InvalidMemoryRange {
    start: u64,
    end: u64,
}

impl MemoryRange {
    /// The maximum page-aligned address: `u64::MAX` rounded down to a page
    /// boundary. [`walk_ranges`] uses this as the exclusive upper bound of
    /// the address space.
    pub const MAX_ADDRESS: u64 = u64::MAX & !(PAGE_SIZE - 1);

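    /// Returns a new range over `[range.start, range.end)`.
    ///
    /// Panics if the start or end is not 4KB-aligned or if the start is after
    /// the end. Use [`MemoryRange::try_new`] for a non-panicking variant.
    ///
    /// A minimal usage sketch; the doc-test assumes this crate is built under
    /// the name `memory_range`:
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// let range = MemoryRange::new(0x1000..0x3000);
    /// assert_eq!(range.len(), 0x2000);
    /// ```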
    #[track_caller]
    pub const fn new(range: Range<u64>) -> Self {
        assert!(range.start & (PAGE_SIZE - 1) == 0);
        assert!(range.end & (PAGE_SIZE - 1) == 0);
        assert!(range.start <= range.end);
        Self {
            start: range.start,
            end: range.end,
        }
    }

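    /// Returns a new range over `[range.start, range.end)`, or an error if
    /// the bounds are not 4KB-aligned or the start is after the end.
    ///
    /// A small sketch of both outcomes (assuming the crate name
    /// `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// assert!(MemoryRange::try_new(0x1000..0x2000).is_ok());
    /// assert!(MemoryRange::try_new(0x1000..0x1234).is_err()); // unaligned end
    /// assert!(MemoryRange::try_new(0x2000..0x1000).is_err()); // start after end
    /// ```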
    pub const fn try_new(range: Range<u64>) -> Result<Self, InvalidMemoryRange> {
        if range.start & (PAGE_SIZE - 1) != 0
            || range.end & (PAGE_SIZE - 1) != 0
            || range.start > range.end
        {
            return Err(InvalidMemoryRange {
                start: range.start,
                end: range.end,
            });
        }
        Ok(Self {
            start: range.start,
            end: range.end,
        })
    }

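    /// Returns the smallest page-aligned range containing every byte of
    /// `range`: the start is rounded down and the end is rounded up to the
    /// nearest page boundary.
    ///
    /// Panics if the start is after the end or if rounding up would overflow.
    ///
    /// For example (assuming the crate name `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// let bounds = MemoryRange::bounding(0x1234..0x5678);
    /// assert_eq!(bounds, MemoryRange::new(0x1000..0x6000));
    /// ```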
    pub fn bounding(range: Range<u64>) -> Self {
        assert!(range.start <= range.end);
        assert!(range.end < u64::MAX - PAGE_SIZE);
        let start = range.start & !(PAGE_SIZE - 1);
        let end = (range.end + (PAGE_SIZE - 1)) & !(PAGE_SIZE - 1);
        Self::new(start..end)
    }

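    /// Converts a range of 4KB guest page numbers into a byte range.
    ///
    /// Panics if the page numbers are too large to be represented as byte
    /// addresses.
    ///
    /// For example (assuming the crate name `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// let range = MemoryRange::from_4k_gpn_range(2..4);
    /// assert_eq!(range, MemoryRange::new(0x2000..0x4000));
    /// ```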
    pub fn from_4k_gpn_range(range: Range<u64>) -> Self {
        const MAX: u64 = u64::MAX / PAGE_SIZE;
        assert!(range.start <= MAX);
        assert!(range.end <= MAX);
        Self::new(range.start * PAGE_SIZE..range.end * PAGE_SIZE)
    }

    /// The empty range, starting and ending at address zero.
    pub const EMPTY: Self = Self::new(0..0);

    /// The start address of the range.
    pub fn start(&self) -> u64 {
        self.start
    }

    /// The start address as a 4KB page number.
    pub fn start_4k_gpn(&self) -> u64 {
        self.start / PAGE_SIZE
    }

    /// The end address as a 4KB page number.
    pub fn end_4k_gpn(&self) -> u64 {
        self.end / PAGE_SIZE
    }

    /// The number of 4KB pages in the range.
    pub fn page_count_4k(&self) -> u64 {
        (self.end - self.start) / PAGE_SIZE
    }

    /// The number of 2MB pages needed to cover the range, rounding up.
    pub fn page_count_2m(&self) -> u64 {
        (self.end - self.start).div_ceil(TWO_MB)
    }

    /// The end address of the range (exclusive).
    pub fn end(&self) -> u64 {
        self.end
    }

    /// The length of the range in bytes.
    pub fn len(&self) -> u64 {
        self.end() - self.start()
    }

    /// Returns whether the range is empty.
    pub fn is_empty(&self) -> bool {
        self.start == self.end
    }

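    /// Returns the largest power-of-two alignment, in bytes, that both the
    /// start and the end of the range satisfy after `base` is added to each.
    ///
    /// A small sketch (assuming the crate name `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// // The end is 2MB-aligned but the start is only 1MB-aligned.
    /// let range = MemoryRange::new(0x100000..0x200000);
    /// assert_eq!(range.alignment(0), 0x100000);
    /// ```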
    pub fn alignment(&self, base: u64) -> u64 {
        let order = ((base + self.start()) | (base + self.end())).trailing_zeros();
        1 << order
    }

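    /// Returns the largest subrange whose start and end are both aligned to
    /// `alignment` bytes, or the empty range if there is no such subrange.
    ///
    /// Panics if `alignment` is not a power of two.
    ///
    /// For example (assuming the crate name `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// let range = MemoryRange::new(0x1000..0x401000);
    /// let aligned = range.aligned_subrange(0x200000);
    /// assert_eq!(aligned, MemoryRange::new(0x200000..0x400000));
    /// ```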
    pub fn aligned_subrange(&self, alignment: u64) -> Self {
        assert!(alignment.is_power_of_two());
        let start = (self.start + alignment - 1) & !(alignment - 1);
        let end = self.end & !(alignment - 1);
        if start <= end {
            Self::new(start..end)
        } else {
            Self::EMPTY
        }
    }

    /// Returns whether this range and `other` overlap in at least one byte.
    pub fn overlaps(&self, other: &Self) -> bool {
        self.end > other.start && self.start < other.end
    }

    /// Returns whether this range fully contains `other`.
    pub fn contains(&self, other: &Self) -> bool {
        self.start <= other.start && self.end >= other.end
    }

    /// Returns whether this range contains the address `addr`.
    pub fn contains_addr(&self, addr: u64) -> bool {
        (self.start..self.end).contains(&addr)
    }

    /// Returns the byte offset of `addr` within the range, or `None` if the
    /// range does not contain `addr`.
    pub fn offset_of(&self, addr: u64) -> Option<u64> {
        if self.contains_addr(addr) {
            Some(addr - self.start)
        } else {
            None
        }
    }

    /// Returns the intersection of this range and `other`, or the empty
    /// range if they do not overlap.
    pub fn intersection(&self, other: &Self) -> Self {
        let start = self.start.max(other.start);
        let end = self.end.min(other.end);
        if start <= end {
            Self::new(start..end)
        } else {
            Self::EMPTY
        }
    }

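    /// Splits the range into two at the given page-aligned byte offset.
    ///
    /// Panics if `offset` is not page-aligned or is larger than the range.
    ///
    /// A quick sketch (assuming the crate name `memory_range`):
    ///
    /// ```
    /// use memory_range::MemoryRange;
    ///
    /// let (low, high) = MemoryRange::new(0x0..0x3000).split_at_offset(0x1000);
    /// assert_eq!(low, MemoryRange::new(0x0..0x1000));
    /// assert_eq!(high, MemoryRange::new(0x1000..0x3000));
    /// ```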
    #[track_caller]
    pub fn split_at_offset(&self, offset: u64) -> (Self, Self) {
        assert!(offset <= self.len());
        assert!(offset % PAGE_SIZE == 0);
        (
            Self {
                start: self.start,
                end: self.start + offset,
            },
            Self {
                start: self.start + offset,
                end: self.end,
            },
        )
    }
}

impl From<MemoryRange> for Range<u64> {
    fn from(range: MemoryRange) -> Self {
        Range {
            start: range.start(),
            end: range.end(),
        }
    }
}

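/// An iterator that splits a [`MemoryRange`] into subranges at 2MB and 1GB
/// alignment boundaries, so that each yielded subrange is uniformly aligned
/// (optionally relative to an offset) and no longer than a configured
/// maximum.
///
/// A small sketch of the splitting behavior (assuming the crate name
/// `memory_range`):
///
/// ```
/// use memory_range::AlignedSubranges;
/// use memory_range::MemoryRange;
///
/// // 1MB..5MB splits at the 2MB boundaries: the middle piece is 2MB-aligned.
/// let parts: Vec<_> = AlignedSubranges::new(MemoryRange::new(0x100000..0x500000)).collect();
/// assert_eq!(
///     parts,
///     [
///         MemoryRange::new(0x100000..0x200000),
///         MemoryRange::new(0x200000..0x400000),
///         MemoryRange::new(0x400000..0x500000),
///     ]
/// );
/// ```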
#[derive(Debug, Clone)]
pub struct AlignedSubranges {
    range: MemoryRange,
    offset: u64,
    max_len: u64,
}

impl AlignedSubranges {
    /// Returns a new iterator over `range`, with no offset and no maximum
    /// subrange length.
    pub fn new(range: MemoryRange) -> Self {
        Self {
            range,
            offset: 0,
            max_len: u64::MAX,
        }
    }

    /// Computes alignment as if each address were shifted by `offset`.
    pub fn with_offset(self, offset: u64) -> Self {
        Self { offset, ..self }
    }

    /// Limits each yielded subrange to at most `max_len` bytes.
    pub fn with_max_range_len(self, max_len: u64) -> Self {
        Self { max_len, ..self }
    }
}

impl Iterator for AlignedSubranges {
    type Item = MemoryRange;

    fn next(&mut self) -> Option<Self::Item> {
        if self.range.is_empty() {
            return None;
        }

        let start = self.range.start() + self.offset;
        let mut end = self.range.end() + self.offset;
        if end - start > self.max_len {
            end = start + self.max_len;
        }

        let mut align = |page_size| {
            let page_mask: u64 = page_size - 1;
            if (start + page_mask) & !page_mask >= end & !page_mask {
                // The range is too small to align to this page size.
                return;
            }
            if start & page_mask != 0 {
                // The start is unaligned, so end this subrange at the next
                // aligned boundary.
                end = end.min((start + page_mask) & !page_mask);
            } else {
                // The start is aligned, so keep the end aligned, too.
                end &= !page_mask;
            }
        };

        align(TWO_MB);
        align(ONE_GB);
        let start = start - self.offset;
        let end = end - self.offset;
        self.range = MemoryRange::new(end..self.range.end());
        Some(MemoryRange::new(start..end))
    }
}

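/// Returns an iterator over the ranges that are in both `left` and `right`.
///
/// Both input iterators must yield ranges in sorted order; the iterator
/// panics otherwise.
///
/// A quick sketch (assuming the crate name `memory_range`):
///
/// ```
/// use memory_range::MemoryRange;
/// use memory_range::overlapping_ranges;
///
/// let left = [MemoryRange::new(0x0..0x2000)];
/// let right = [MemoryRange::new(0x1000..0x3000)];
/// let overlap: Vec<_> = overlapping_ranges(left, right).collect();
/// assert_eq!(overlap, [MemoryRange::new(0x1000..0x2000)]);
/// ```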
pub fn overlapping_ranges(
    left: impl IntoIterator<Item = MemoryRange>,
    right: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    walk_ranges(
        left.into_iter().map(|r| (r, ())),
        right.into_iter().map(|r| (r, ())),
    )
    .filter_map(|(r, c)| match c {
        RangeWalkResult::Both((), ()) => Some(r),
        _ => None,
    })
}

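/// Returns an iterator over the portions of `left` that are not covered by
/// any range in `right`.
///
/// Both input iterators must yield ranges in sorted order; the iterator
/// panics otherwise.
///
/// A quick sketch (assuming the crate name `memory_range`):
///
/// ```
/// use memory_range::MemoryRange;
/// use memory_range::subtract_ranges;
///
/// let left = [MemoryRange::new(0x0..0x4000)];
/// let right = [MemoryRange::new(0x1000..0x2000)];
/// let rest: Vec<_> = subtract_ranges(left, right).collect();
/// assert_eq!(
///     rest,
///     [MemoryRange::new(0x0..0x1000), MemoryRange::new(0x2000..0x4000)]
/// );
/// ```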
pub fn subtract_ranges(
    left: impl IntoIterator<Item = MemoryRange>,
    right: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    walk_ranges(
        left.into_iter().map(|r| (r, ())),
        right.into_iter().map(|r| (r, ())),
    )
    .filter_map(|(r, c)| match c {
        RangeWalkResult::Left(()) => Some(r),
        RangeWalkResult::Neither | RangeWalkResult::Right(()) | RangeWalkResult::Both((), ()) => {
            None
        }
    })
}

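/// Walks two sorted sequences of tagged ranges in lockstep, yielding each
/// maximal subrange with uniform coverage along with a [`RangeWalkResult`]
/// describing which inputs cover it. The yielded ranges are contiguous and
/// cover the address space up to [`MemoryRange::MAX_ADDRESS`].
///
/// Panics during iteration if either input is not sorted.
///
/// A sketch of a full walk (the doc-test assumes this crate is built as
/// `memory_range`):
///
/// ```
/// use memory_range::MemoryRange;
/// use memory_range::RangeWalkResult;
/// use memory_range::walk_ranges;
///
/// let left = [(MemoryRange::new(0x0..0x2000), 'l')];
/// let right = [(MemoryRange::new(0x1000..0x3000), 'r')];
/// let walked: Vec<_> = walk_ranges(left, right).collect();
/// assert_eq!(
///     walked,
///     [
///         (MemoryRange::new(0x0..0x1000), RangeWalkResult::Left('l')),
///         (MemoryRange::new(0x1000..0x2000), RangeWalkResult::Both('l', 'r')),
///         (MemoryRange::new(0x2000..0x3000), RangeWalkResult::Right('r')),
///         (
///             MemoryRange::new(0x3000..MemoryRange::MAX_ADDRESS),
///             RangeWalkResult::Neither,
///         ),
///     ]
/// );
/// ```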
pub fn walk_ranges<T: Clone, U: Clone>(
    left: impl IntoIterator<Item = (MemoryRange, T)>,
    right: impl IntoIterator<Item = (MemoryRange, U)>,
) -> impl Iterator<Item = (MemoryRange, RangeWalkResult<T, U>)> {
    RangeWalkIter {
        pos: 0,
        left: PeekableSorted::new(left),
        right: PeekableSorted::new(right),
    }
}

/// The coverage of a subrange yielded by [`walk_ranges`].
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum RangeWalkResult<T, U> {
    /// The subrange is in neither input.
    Neither,
    /// The subrange is only in the left input, with the given tag.
    Left(T),
    /// The subrange is only in the right input, with the given tag.
    Right(U),
    /// The subrange is in both inputs, with the given tags.
    Both(T, U),
}

struct RangeWalkIter<I: Iterator, J: Iterator> {
    pos: u64,
    left: PeekableSorted<I>,
    right: PeekableSorted<J>,
}

struct PeekableSorted<I: Iterator> {
    iter: I,
    #[expect(clippy::option_option)]
    item: Option<Option<I::Item>>,
}

impl<I: Iterator<Item = (MemoryRange, T)>, T> PeekableSorted<I> {
    fn new(iter: impl IntoIterator<IntoIter = I>) -> Self {
        Self {
            iter: iter.into_iter(),
            item: None,
        }
    }

    /// Peeks at the next range that is non-empty and ends after `pos`,
    /// asserting (with `msg`) that the input is sorted.
    fn peek_in_range_ensure_sorted(&mut self, pos: u64, msg: &str) -> Option<&(MemoryRange, T)> {
        loop {
            let r = self
                .item
                .get_or_insert_with(|| {
                    let r = self.iter.next()?;
                    assert!(r.0.start() >= pos, "{msg} not sorted");
                    Some(r)
                })
                .as_ref()?;
            if !r.0.is_empty() && r.0.end() > pos {
                return Some(self.item.as_ref().unwrap().as_ref().unwrap());
            }
            self.item = None;
        }
    }
}

impl<I: Iterator<Item = (MemoryRange, T)>, J: Iterator<Item = (MemoryRange, U)>, T: Clone, U: Clone>
    Iterator for RangeWalkIter<I, J>
{
    type Item = (MemoryRange, RangeWalkResult<T, U>);

    fn next(&mut self) -> Option<Self::Item> {
        if self.pos == MemoryRange::MAX_ADDRESS {
            return None;
        }
        let left = self.left.peek_in_range_ensure_sorted(self.pos, "left");
        let right = self.right.peek_in_range_ensure_sorted(self.pos, "right");
        // Find the end of the next subrange with uniform coverage and
        // classify its coverage.
        let (end, c) = match (left, right) {
            (Some(&(left, ref t)), Some(&(right, ref u))) => {
                if self.pos < left.start() {
                    if self.pos < right.start() {
                        (left.start().min(right.start()), RangeWalkResult::Neither)
                    } else {
                        (
                            left.start().min(right.end()),
                            RangeWalkResult::Right(u.clone()),
                        )
                    }
                } else if self.pos < right.start() {
                    (
                        right.start().min(left.end()),
                        RangeWalkResult::Left(t.clone()),
                    )
                } else {
                    (
                        left.end().min(right.end()),
                        RangeWalkResult::Both(t.clone(), u.clone()),
                    )
                }
            }
            (Some(&(left, ref t)), None) => {
                if self.pos < left.start() {
                    (left.start(), RangeWalkResult::Neither)
                } else {
                    (left.end(), RangeWalkResult::Left(t.clone()))
                }
            }
            (None, Some(&(right, ref u))) => {
                if self.pos < right.start() {
                    (right.start(), RangeWalkResult::Neither)
                } else {
                    (right.end(), RangeWalkResult::Right(u.clone()))
                }
            }
            (None, None) => (MemoryRange::MAX_ADDRESS, RangeWalkResult::Neither),
        };
        let r = MemoryRange::new(self.pos..end);
        self.pos = end;
        Some((r, c))
    }
}

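/// Merges overlapping and adjacent ranges from a sorted sequence, yielding a
/// minimal sequence of non-overlapping, non-adjacent ranges.
///
/// Panics during iteration if the input is not sorted.
///
/// A quick sketch (assuming the crate name `memory_range`):
///
/// ```
/// use memory_range::MemoryRange;
/// use memory_range::flatten_ranges;
///
/// let merged: Vec<_> = flatten_ranges([
///     MemoryRange::new(0x0..0x1000),
///     MemoryRange::new(0x1000..0x3000),
///     MemoryRange::new(0x5000..0x6000),
/// ])
/// .collect();
/// assert_eq!(
///     merged,
///     [MemoryRange::new(0x0..0x3000), MemoryRange::new(0x5000..0x6000)]
/// );
/// ```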
pub fn flatten_ranges(
    ranges: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    FlattenIter {
        iter: ranges.into_iter().peekable(),
    }
}

struct FlattenIter<I: Iterator> {
    iter: Peekable<I>,
}

impl<I: Iterator<Item = MemoryRange>> Iterator for FlattenIter<I> {
    type Item = MemoryRange;

    fn next(&mut self) -> Option<Self::Item> {
        let first = self.iter.next()?;
        let mut start = first.start();
        let mut end = first.end();
        // Consume every subsequent range that overlaps or abuts the current
        // one, extending the end as needed.
        while let Some(r) = self.iter.next_if(|r| {
            assert!(r.start() >= start, "ranges are not sorted");
            r.start() <= end
        }) {
            start = r.start();
            end = end.max(r.end());
        }
        Some(MemoryRange::new(first.start()..end))
    }
}

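/// Merges exactly adjacent ranges with equal tags from a sorted,
/// non-overlapping sequence, yielding one `(range, tag)` pair per merged run.
///
/// Panics during iteration if the input is not sorted or if any ranges
/// overlap.
///
/// A quick sketch (assuming the crate name `memory_range`):
///
/// ```
/// use memory_range::MemoryRange;
/// use memory_range::merge_adjacent_ranges;
///
/// let merged: Vec<_> = merge_adjacent_ranges([
///     (MemoryRange::new(0x0..0x1000), "ram"),
///     (MemoryRange::new(0x1000..0x2000), "ram"),
///     (MemoryRange::new(0x2000..0x3000), "mmio"),
/// ])
/// .collect();
/// assert_eq!(
///     merged,
///     [
///         (MemoryRange::new(0x0..0x2000), "ram"),
///         (MemoryRange::new(0x2000..0x3000), "mmio"),
///     ]
/// );
/// ```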
pub fn merge_adjacent_ranges<T: PartialEq>(
    ranges: impl IntoIterator<Item = (MemoryRange, T)>,
) -> impl Iterator<Item = (MemoryRange, T)> {
    MergeAdjacentIter {
        iter: ranges.into_iter().peekable(),
    }
}

struct MergeAdjacentIter<I: Iterator> {
    iter: Peekable<I>,
}

impl<I: Iterator<Item = (MemoryRange, T)>, T: PartialEq> Iterator for MergeAdjacentIter<I> {
    type Item = (MemoryRange, T);

    fn next(&mut self) -> Option<Self::Item> {
        let (first, typ) = self.iter.next()?;
        let mut start = first.start();
        let mut end = first.end();
        // Consume every subsequent range that starts exactly at the current
        // end and carries an equal tag.
        while let Some((r, _t)) = self.iter.next_if(|(r, t)| {
            assert!(r.start() >= start, "ranges are not sorted");
            assert!(r.start() >= end, "ranges overlap");
            r.start() == end && &typ == t
        }) {
            start = r.start();
            end = end.max(r.end());
        }
        Some((MemoryRange::new(first.start()..end), typ))
    }
}

#[cfg(test)]
mod tests {
    extern crate alloc;

    use super::MemoryRange;
    use super::TWO_MB;
    use crate::AlignedSubranges;
    use crate::flatten_ranges;
    use crate::merge_adjacent_ranges;
    use crate::overlapping_ranges;
    use crate::subtract_ranges;
    use alloc::vec;
    use alloc::vec::Vec;

    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;

    #[test]
    fn test_align() {
        #[derive(Clone, Debug, PartialEq, Copy)]
        struct AlignedRangeResult {
            range: MemoryRange,
            page_size: u64,
        }

        let compare_with_base = |r1: Vec<MemoryRange>, base: u64, er: Vec<AlignedRangeResult>| {
            let result: Vec<_> = r1
                .iter()
                .flat_map(|range| AlignedSubranges::new(*range).with_offset(base))
                .collect();
            assert_eq!(result.len(), er.len());
            for (pos, range) in result.iter().enumerate() {
                assert_eq!(*range, er[pos].range);
                assert_eq!(range.alignment(base), er[pos].page_size);
            }
        };

        let compare = |r1: Vec<MemoryRange>, er: Vec<AlignedRangeResult>| {
            compare_with_base(r1, 0, er);
        };

        fn b_mr(start: u64, end: u64) -> MemoryRange {
            MemoryRange::new(start..end)
        }

        fn b_arr(range: MemoryRange, page_size: u64) -> AlignedRangeResult {
            AlignedRangeResult { range, page_size }
        }

        let ram: Vec<_> = vec![b_mr(0x0, 4 * KB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 12)];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(0x0, 2 * MB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 21)];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(0x0, GB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 30)];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(6 * MB, 12 * MB + 4 * KB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(6 * MB, 12 * MB), TWO_MB),
            b_arr(b_mr(12 * MB, 12 * MB + 4 * KB), 1 << 12),
        ];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(5 * MB + 400 * KB, 12 * MB + 400 * KB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(5 * MB + 400 * KB, 6 * MB), 1 << 14),
            b_arr(b_mr(6 * MB, 12 * MB), TWO_MB),
            b_arr(b_mr(12 * MB, 12 * MB + 400 * KB), 1 << 14),
        ];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(GB + 501 * MB, 3 * GB + 503 * MB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(GB + 501 * MB, GB + 502 * MB), 1 << 20),
            b_arr(b_mr(GB + 502 * MB, 2 * GB), 1 << 21),
            b_arr(b_mr(2 * GB, 3 * GB), 1 << 30),
            b_arr(b_mr(3 * GB, 3 * GB + 502 * MB), 1 << 21),
            b_arr(b_mr(3 * GB + 502 * MB, 3 * GB + 503 * MB), 1 << 20),
        ];
        compare(ram, expected_res);

        let ram: Vec<_> = vec![b_mr(4 * MB + 8 * KB, 6 * MB + 8 * KB)];
        let base = 2 * MB - 8 * KB;
        let expected_res: Vec<_> = vec![b_arr(ram[0], TWO_MB)];
        compare_with_base(ram, base, expected_res);

        let ram: Vec<_> = vec![b_mr(4 * MB + 8 * KB, 6 * MB + 8 * KB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 13)];
        compare_with_base(ram, 0, expected_res);
    }

    #[test]
    fn test_overlapping_ranges() {
        let left = [
            MemoryRange::new(0..4 * MB),
            MemoryRange::new(8 * MB..12 * MB),
            MemoryRange::new(12 * MB..12 * MB),
            MemoryRange::new(16 * MB..20 * MB),
            MemoryRange::new(24 * MB..32 * MB),
            MemoryRange::new(40 * MB..48 * MB),
        ];
        let right = [
            MemoryRange::new(2 * MB..6 * MB),
            MemoryRange::new(10 * MB..11 * MB),
            MemoryRange::new(11 * MB..11 * MB),
            MemoryRange::new(11 * MB..13 * MB),
            MemoryRange::new(15 * MB..22 * MB),
            MemoryRange::new(26 * MB..30 * MB),
        ];

        let result: Vec<_> = overlapping_ranges(left, right).collect();
        assert_eq!(
            result.as_slice(),
            &[
                MemoryRange::new(2 * MB..4 * MB),
                MemoryRange::new(10 * MB..11 * MB),
                MemoryRange::new(11 * MB..12 * MB),
                MemoryRange::new(16 * MB..20 * MB),
                MemoryRange::new(26 * MB..30 * MB),
            ]
        );
    }

    #[test]
    fn test_subtract_ranges() {
        let left = [
            MemoryRange::new(0..4 * MB),
            MemoryRange::new(8 * MB..12 * MB),
            MemoryRange::new(12 * MB..12 * MB),
            MemoryRange::new(16 * MB..20 * MB),
            MemoryRange::new(24 * MB..32 * MB),
            MemoryRange::new(40 * MB..48 * MB),
        ];
        let right = [
            MemoryRange::new(2 * MB..6 * MB),
            MemoryRange::new(10 * MB..11 * MB),
            MemoryRange::new(11 * MB..11 * MB),
            MemoryRange::new(11 * MB..13 * MB),
            MemoryRange::new(15 * MB..22 * MB),
            MemoryRange::new(26 * MB..30 * MB),
        ];

        let result: Vec<_> = subtract_ranges(left, right).collect();
        assert_eq!(
            result.as_slice(),
            &[
                MemoryRange::new(0..2 * MB),
                MemoryRange::new(8 * MB..10 * MB),
                MemoryRange::new(24 * MB..26 * MB),
                MemoryRange::new(30 * MB..32 * MB),
                MemoryRange::new(40 * MB..48 * MB),
            ]
        );
    }

    #[test]
    #[should_panic(expected = "left not sorted")]
    fn test_panic_unsorted_overlapping_left() {
        overlapping_ranges(
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(0..MB)],
            [MemoryRange::new(3 * MB..4 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "right not sorted")]
    fn test_panic_unsorted_overlapping_right() {
        overlapping_ranges(
            [
                MemoryRange::new(MB..2 * MB),
                MemoryRange::new(3 * MB..4 * MB),
            ],
            [MemoryRange::new(0..MB), MemoryRange::new(0..MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "left not sorted")]
    fn test_panic_unsorted_subtract_left() {
        subtract_ranges(
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(0..MB)],
            [MemoryRange::new(MB..2 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "right not sorted")]
    fn test_panic_unsorted_subtract_right() {
        subtract_ranges(
            [
                MemoryRange::new(MB..2 * MB),
                MemoryRange::new(3 * MB..4 * MB),
            ],
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(MB..2 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    fn test_aligned_subrange() {
        let test_cases = &[
            (0..0, MB, 0..0),
            (0..MB, MB, 0..MB),
            (4 * KB..MB + 4 * KB, MB, MB..MB),
            (MB..5 * MB, 2 * MB, 2 * MB..4 * MB),
        ];
        for (range, alignment, expected_aligned_range) in test_cases.iter().cloned() {
            assert_eq!(
                MemoryRange::new(range).aligned_subrange(alignment),
                MemoryRange::new(expected_aligned_range)
            );
        }
    }

    #[test]
    fn test_flatten_ranges() {
        let ranges =
            [0..4, 5..7, 6..11, 13..20, 20..25, 22..24, 35..36].map(MemoryRange::from_4k_gpn_range);
        let result = [0..4, 5..11, 13..25, 35..36].map(MemoryRange::from_4k_gpn_range);
        assert!(flatten_ranges(ranges).eq(result));
    }

    #[test]
    #[should_panic(expected = "ranges are not sorted")]
    fn test_flatten_ranges_not_sorted() {
        flatten_ranges([0..4, 5..7, 3..8].map(MemoryRange::from_4k_gpn_range)).for_each(|_| ());
    }

    #[test]
    fn test_merge_adjacent_ranges() {
        #[derive(Clone, Copy, PartialEq, Eq)]
        enum Color {
            Red,
            Blue,
        }

        let ranges = [0..4, 5..7, 7..11, 11..12, 13..20, 20..25, 35..36]
            .map(MemoryRange::from_4k_gpn_range)
            .into_iter()
            .zip([
                Color::Red,
                Color::Red,
                Color::Red,
                Color::Blue,
                Color::Red,
                Color::Red,
                Color::Blue,
            ]);
        let result = [0..4, 5..11, 11..12, 13..25, 35..36]
            .map(MemoryRange::from_4k_gpn_range)
            .into_iter()
            .zip([Color::Red, Color::Red, Color::Blue, Color::Red, Color::Blue]);
        assert!(merge_adjacent_ranges(ranges).eq(result));
    }

    #[test]
    #[should_panic(expected = "ranges are not sorted")]
    fn test_merge_adjacent_ranges_not_sorted() {
        merge_adjacent_ranges([0..4, 5..7, 3..8].map(|r| (MemoryRange::from_4k_gpn_range(r), ())))
            .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "ranges overlap")]
    fn test_merge_adjacent_ranges_overlap() {
        merge_adjacent_ranges([0..6, 5..7, 9..12].map(|r| (MemoryRange::from_4k_gpn_range(r), ())))
            .for_each(|_| ());
    }
}