memory_range/
lib.rs

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

//! The [`MemoryRange`] type, which represents a 4KB-page-aligned byte range of
//! memory, plus algorithms operating on the type.

#![forbid(unsafe_code)]
#![no_std]

use core::iter::Iterator;
use core::iter::Peekable;
use core::ops::Range;

const PAGE_SIZE: u64 = 4096;
const TWO_MB: u64 = 0x20_0000;
const ONE_GB: u64 = 0x4000_0000;

/// Represents a page-aligned byte range of memory.
///
/// This type has a stable `Protobuf` representation, and can be directly used
/// in saved state.
// TODO: enforce invariants during de/serialization
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(
    feature = "mesh",
    derive(mesh_protobuf::Protobuf),
    mesh(package = "topology")
)]
#[cfg_attr(feature = "inspect", derive(inspect::Inspect), inspect(display))]
pub struct MemoryRange {
    #[cfg_attr(feature = "mesh", mesh(1))]
    start: u64,
    #[cfg_attr(feature = "mesh", mesh(2))]
    end: u64,
}

impl core::fmt::Display for MemoryRange {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:#x}-{:#x}", self.start(), self.end())
    }
}

impl TryFrom<Range<u64>> for MemoryRange {
    type Error = InvalidMemoryRange;

    fn try_from(range: Range<u64>) -> Result<Self, Self::Error> {
        Self::try_new(range)
    }
}

impl TryFrom<Range<usize>> for MemoryRange {
    type Error = InvalidMemoryRange;

    fn try_from(range: Range<usize>) -> Result<Self, Self::Error> {
        Self::try_new(range.start as u64..range.end as u64)
    }
}

/// Error returned by [`MemoryRange::try_new`].
#[derive(Debug, thiserror::Error)]
#[error("unaligned or invalid memory range: {start:#x}-{end:#x}")]
pub struct InvalidMemoryRange {
    start: u64,
    end: u64,
}

impl MemoryRange {
    /// The maximum address that can be represented by a `MemoryRange`.
    pub const MAX_ADDRESS: u64 = u64::MAX & !(PAGE_SIZE - 1);

    /// Returns a new range for the given guest address range.
    ///
    /// Panics if the start or end are not 4KB aligned or if the start is after
    /// the end.
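    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let range = MemoryRange::new(0x1000..0x3000);
    /// assert_eq!(range.len(), 0x2000);
    /// ```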
    #[track_caller]
    pub const fn new(range: Range<u64>) -> Self {
        assert!(range.start & (PAGE_SIZE - 1) == 0);
        assert!(range.end & (PAGE_SIZE - 1) == 0);
        assert!(range.start <= range.end);
        Self {
            start: range.start,
            end: range.end,
        }
    }

    /// Returns a new range for the given guest address range.
    ///
    /// Returns an error if the start or end are not 4KB aligned or if the
    /// start is after the end.
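    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// assert!(MemoryRange::try_new(0x1000..0x2000).is_ok());
    /// // An unaligned end address is rejected.
    /// assert!(MemoryRange::try_new(0x1000..0x2001).is_err());
    /// ```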
    pub const fn try_new(range: Range<u64>) -> Result<Self, InvalidMemoryRange> {
        if range.start & (PAGE_SIZE - 1) != 0
            || range.end & (PAGE_SIZE - 1) != 0
            || range.start > range.end
        {
            return Err(InvalidMemoryRange {
                start: range.start,
                end: range.end,
            });
        }
        Ok(Self {
            start: range.start,
            end: range.end,
        })
    }

    /// Returns the smallest 4K-aligned range that contains the given address
    /// range.
    ///
    /// Panics if the start is after the end or if the end address is in the
    /// last page of the 64-bit space.
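    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// // The start is aligned down and the end is aligned up.
    /// let range = MemoryRange::bounding(0x1234..0x5678);
    /// assert_eq!(range, MemoryRange::new(0x1000..0x6000));
    /// ```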
    pub fn bounding(range: Range<u64>) -> Self {
        assert!(range.start <= range.end);
        assert!(range.end < u64::MAX - PAGE_SIZE);
        let start = range.start & !(PAGE_SIZE - 1);
        let end = (range.end + (PAGE_SIZE - 1)) & !(PAGE_SIZE - 1);
        Self::new(start..end)
    }

    /// Returns a new range for the given guest 4KB page range.
    ///
    /// Panics if the start is after the end or if the start or end address
    /// overflows when converted to a byte address.
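    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let range = MemoryRange::from_4k_gpn_range(2..4);
    /// assert_eq!(range, MemoryRange::new(0x2000..0x4000));
    /// ```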
    pub fn from_4k_gpn_range(range: Range<u64>) -> Self {
        const MAX: u64 = u64::MAX / PAGE_SIZE;
        assert!(range.start <= MAX);
        assert!(range.end <= MAX);
        Self::new(range.start * PAGE_SIZE..range.end * PAGE_SIZE)
    }

    /// The empty range, with start and end addresses of zero.
    pub const EMPTY: Self = Self::new(0..0);

    /// The start address.
    pub fn start(&self) -> u64 {
        self.start
    }

    /// The start address as a 4KB page number.
    pub fn start_4k_gpn(&self) -> u64 {
        self.start / PAGE_SIZE
    }

    /// The end address as a 4KB page number.
    pub fn end_4k_gpn(&self) -> u64 {
        self.end / PAGE_SIZE
    }

    /// The number of 4KB pages in the range.
    pub fn page_count_4k(&self) -> u64 {
        (self.end - self.start) / PAGE_SIZE
    }

    /// The number of 2MB pages needed to cover the range, rounding up if the
    /// range is not a multiple of 2MB in length.
    pub fn page_count_2m(&self) -> u64 {
        (self.end - self.start).div_ceil(TWO_MB)
    }

    /// The end address.
    pub fn end(&self) -> u64 {
        self.end
    }

    /// The length of the range in bytes.
    pub fn len(&self) -> u64 {
        self.end() - self.start()
    }

    /// Check if the range is empty.
    pub fn is_empty(&self) -> bool {
        self.start == self.end
    }

    /// Gets the biggest page size that can map the range, assuming the range
    /// is mapped at byte offset `base`.
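    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let range = MemoryRange::new(0x20_0000..0x40_0000);
    /// // Both endpoints are 2MB-aligned, so 2MB pages can map the range.
    /// assert_eq!(range.alignment(0), 0x20_0000);
    /// ```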
    pub fn alignment(&self, base: u64) -> u64 {
        let order = ((base + self.start()) | (base + self.end())).trailing_zeros();
        1 << order
    }

    /// Returns the largest range contained in this range whose start and end
    /// are aligned to `alignment` bytes. This may be the empty range.
    ///
    /// Panics if `alignment` is not a power of two.
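    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let range = MemoryRange::new(0x1000..0x30_0000);
    /// // The largest 1MB-aligned subrange starts at 1MB.
    /// assert_eq!(
    ///     range.aligned_subrange(0x10_0000),
    ///     MemoryRange::new(0x10_0000..0x30_0000)
    /// );
    /// ```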
    pub fn aligned_subrange(&self, alignment: u64) -> Self {
        assert!(alignment.is_power_of_two());
        let start = (self.start + alignment - 1) & !(alignment - 1);
        let end = self.end & !(alignment - 1);
        if start <= end {
            Self::new(start..end)
        } else {
            Self::EMPTY
        }
    }

    /// Returns whether `self` and `other` overlap.
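    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let a = MemoryRange::new(0x1000..0x3000);
    /// assert!(a.overlaps(&MemoryRange::new(0x2000..0x4000)));
    /// // Adjacent ranges do not overlap.
    /// assert!(!a.overlaps(&MemoryRange::new(0x3000..0x4000)));
    /// ```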
    pub fn overlaps(&self, other: &Self) -> bool {
        self.end > other.start && self.start < other.end
    }

    /// Returns whether `self` contains `other`.
    pub fn contains(&self, other: &Self) -> bool {
        self.start <= other.start && self.end >= other.end
    }

    /// Returns whether `self` contains the byte at `addr`.
    pub fn contains_addr(&self, addr: u64) -> bool {
        (self.start..self.end).contains(&addr)
    }

    /// Returns the byte offset of `addr` within the range, if it is contained.
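    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let range = MemoryRange::new(0x1000..0x3000);
    /// assert_eq!(range.offset_of(0x1500), Some(0x500));
    /// // The end address is exclusive.
    /// assert_eq!(range.offset_of(0x3000), None);
    /// ```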
    pub fn offset_of(&self, addr: u64) -> Option<u64> {
        if self.contains_addr(addr) {
            Some(addr - self.start)
        } else {
            None
        }
    }

    /// Returns the intersection of `self` and `other`.
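    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let a = MemoryRange::new(0x1000..0x4000);
    /// let b = MemoryRange::new(0x3000..0x6000);
    /// assert_eq!(a.intersection(&b), MemoryRange::new(0x3000..0x4000));
    /// ```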
    pub fn intersection(&self, other: &Self) -> Self {
        let start = self.start.max(other.start);
        let end = self.end.min(other.end);
        if start <= end {
            Self::new(start..end)
        } else {
            Self::EMPTY
        }
    }

    /// Split the range at the given byte offset within the range.
    ///
    /// Panics if `offset` is greater than the length of the range or is not
    /// page-aligned.
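    ///
    /// # Example
    /// ```
    /// # use memory_range::MemoryRange;
    /// let (low, high) = MemoryRange::new(0x1000..0x4000).split_at_offset(0x1000);
    /// assert_eq!(low, MemoryRange::new(0x1000..0x2000));
    /// assert_eq!(high, MemoryRange::new(0x2000..0x4000));
    /// ```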
    #[track_caller]
    pub fn split_at_offset(&self, offset: u64) -> (Self, Self) {
        assert!(offset <= self.len());
        assert!(offset % PAGE_SIZE == 0);
        (
            Self {
                start: self.start,
                end: self.start + offset,
            },
            Self {
                start: self.start + offset,
                end: self.end,
            },
        )
    }
}

impl From<MemoryRange> for Range<u64> {
    fn from(range: MemoryRange) -> Self {
        Range {
            start: range.start(),
            end: range.end(),
        }
    }
}

/// Iterator over aligned subranges of a memory range.
///
/// Each subrange will be made up of aligned pages of size 4KB, 2MB, or 1GB.
/// Subrange boundaries are chosen to output the maximum number of the largest
/// pages.
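///
/// # Example
/// ```
/// # use memory_range::{AlignedSubranges, MemoryRange};
/// // The unaligned 4KB head is split off so the rest of the range can be
/// // covered by aligned 2MB pages.
/// let subranges: Vec<_> =
///     AlignedSubranges::new(MemoryRange::new(0x1000..0x40_0000)).collect();
/// assert_eq!(
///     subranges,
///     vec![
///         MemoryRange::new(0x1000..0x20_0000),
///         MemoryRange::new(0x20_0000..0x40_0000),
///     ]
/// );
/// ```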
#[derive(Debug, Clone)]
pub struct AlignedSubranges {
    range: MemoryRange,
    offset: u64,
    max_len: u64,
}

impl AlignedSubranges {
    /// Returns a new iterator of subranges in `range`.
    pub fn new(range: MemoryRange) -> Self {
        Self {
            range,
            offset: 0,
            max_len: u64::MAX,
        }
    }

    /// Returns an iterator that computes subrange alignment as if the range
    /// were offset by `offset` bytes.
    pub fn with_offset(self, offset: u64) -> Self {
        Self { offset, ..self }
    }

    /// Returns an iterator that limits each output subrange to at most
    /// `max_len` bytes.
    pub fn with_max_range_len(self, max_len: u64) -> Self {
        Self { max_len, ..self }
    }
}

impl Iterator for AlignedSubranges {
    type Item = MemoryRange;

    fn next(&mut self) -> Option<Self::Item> {
        if self.range.is_empty() {
            return None;
        }

        let start = self.range.start() + self.offset;
        let mut end = self.range.end() + self.offset;
        if end - start > self.max_len {
            end = start + self.max_len;
        }

        let mut align = |page_size| {
            let page_mask: u64 = page_size - 1;
            if (start + page_mask) & !page_mask >= end & !page_mask {
                // No sense in aligning this, since we won't get any aligned
                // pages.
                return;
            }
            if start & page_mask != 0 {
                // Align the next range's start.
                end = end.min((start + page_mask) & !page_mask);
            } else {
                // Align this range's end.
                end &= !page_mask;
            }
        };

        align(TWO_MB);
        align(ONE_GB);
        let start = start - self.offset;
        let end = end - self.offset;
        self.range = MemoryRange::new(end..self.range.end());
        Some(MemoryRange::new(start..end))
    }
}

/// Returns an iterator over memory ranges that are in both `left` and `right`.
///
/// For example, if `left` is `[0..4MB, 8MB..12MB]` and `right` is `[2MB..6MB, 10MB..11MB]`,
/// the resulting iterator will yield `[2MB..4MB, 10MB..11MB]`.
///
/// Panics if `left` or `right` are not sorted or are overlapping.
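///
/// # Example
/// ```
/// # use memory_range::{MemoryRange, overlapping_ranges};
/// const MB: u64 = 0x10_0000;
/// let left = [MemoryRange::new(0..4 * MB), MemoryRange::new(8 * MB..12 * MB)];
/// let right = [MemoryRange::new(2 * MB..6 * MB), MemoryRange::new(10 * MB..11 * MB)];
/// let result: Vec<_> = overlapping_ranges(left, right).collect();
/// assert_eq!(
///     result.as_slice(),
///     &[
///         MemoryRange::new(2 * MB..4 * MB),
///         MemoryRange::new(10 * MB..11 * MB),
///     ]
/// );
/// ```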
pub fn overlapping_ranges(
    left: impl IntoIterator<Item = MemoryRange>,
    right: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    walk_ranges(
        left.into_iter().map(|r| (r, ())),
        right.into_iter().map(|r| (r, ())),
    )
    .filter_map(|(r, c)| match c {
        RangeWalkResult::Both((), ()) => Some(r),
        _ => None,
    })
}

/// Returns an iterator over the ranges in `left` that are not in `right`.
///
/// For example, if `left` is `[0..4MB, 8MB..12MB]` and `right` is `[2MB..6MB,
/// 10MB..11MB]`, the resulting iterator will yield `[0..2MB, 8MB..10MB,
/// 11MB..12MB]`.
///
/// Panics if `left` or `right` are not sorted or are overlapping.
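///
/// # Example
/// ```
/// # use memory_range::{MemoryRange, subtract_ranges};
/// const MB: u64 = 0x10_0000;
/// let left = [MemoryRange::new(0..4 * MB), MemoryRange::new(8 * MB..12 * MB)];
/// let right = [MemoryRange::new(2 * MB..6 * MB), MemoryRange::new(10 * MB..11 * MB)];
/// let result: Vec<_> = subtract_ranges(left, right).collect();
/// assert_eq!(
///     result.as_slice(),
///     &[
///         MemoryRange::new(0..2 * MB),
///         MemoryRange::new(8 * MB..10 * MB),
///         MemoryRange::new(11 * MB..12 * MB),
///     ]
/// );
/// ```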
pub fn subtract_ranges(
    left: impl IntoIterator<Item = MemoryRange>,
    right: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    walk_ranges(
        left.into_iter().map(|r| (r, ())),
        right.into_iter().map(|r| (r, ())),
    )
    .filter_map(|(r, c)| match c {
        RangeWalkResult::Left(()) => Some(r),
        RangeWalkResult::Neither | RangeWalkResult::Right(()) | RangeWalkResult::Both((), ()) => {
            None
        }
    })
}

/// Returns an iterator that computes the overlapping state of the ranges in
/// `left` and `right`.
///
/// The iterator yields a tuple of a [`MemoryRange`] and a [`RangeWalkResult`]
/// enum that indicates whether each subrange is only in `left`, only in
/// `right`, in both, or in neither.
///
/// Panics if `left` or `right` are not sorted.
///
/// # Examples
///
/// ```
/// # use memory_range::{MemoryRange, RangeWalkResult, walk_ranges};
/// let left = [(MemoryRange::new(0x100000..0x400000), "first"), (MemoryRange::new(0x800000..0xc00000), "second")];
/// let right = [(MemoryRange::new(0x200000..0x900000), 1000), (MemoryRange::new(0x900000..0xa00000), 2000)];
/// let v: Vec<_> = walk_ranges(left, right).collect();
/// let expected = [
///     (MemoryRange::new(0..0x100000), RangeWalkResult::Neither),
///     (MemoryRange::new(0x100000..0x200000), RangeWalkResult::Left("first")),
///     (MemoryRange::new(0x200000..0x400000), RangeWalkResult::Both("first", 1000)),
///     (MemoryRange::new(0x400000..0x800000), RangeWalkResult::Right(1000)),
///     (MemoryRange::new(0x800000..0x900000), RangeWalkResult::Both("second", 1000)),
///     (MemoryRange::new(0x900000..0xa00000), RangeWalkResult::Both("second", 2000)),
///     (MemoryRange::new(0xa00000..0xc00000), RangeWalkResult::Left("second")),
///     (MemoryRange::new(0xc00000..MemoryRange::MAX_ADDRESS), RangeWalkResult::Neither),
/// ];
/// assert_eq!(v.as_slice(), expected.as_slice());
/// ```
pub fn walk_ranges<T: Clone, U: Clone>(
    left: impl IntoIterator<Item = (MemoryRange, T)>,
    right: impl IntoIterator<Item = (MemoryRange, U)>,
) -> impl Iterator<Item = (MemoryRange, RangeWalkResult<T, U>)> {
    RangeWalkIter {
        pos: 0,
        left: PeekableSorted::new(left),
        right: PeekableSorted::new(right),
    }
}

/// The result of an iteration of [`walk_ranges`].
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum RangeWalkResult<T, U> {
    /// Neither iterator contains this range.
    Neither,
    /// Only the left iterator contains this range, in the element with the
    /// given value.
    Left(T),
    /// Only the right iterator contains this range, in the element with the
    /// given value.
    Right(U),
    /// Both iterators contain this range, in the elements with the given
    /// values.
    Both(T, U),
}

struct RangeWalkIter<I: Iterator, J: Iterator> {
    pos: u64,
    left: PeekableSorted<I>,
    right: PeekableSorted<J>,
}

struct PeekableSorted<I: Iterator> {
    iter: I,
    #[expect(clippy::option_option)] // `Some(None)` is used to remember that `iter` is empty.
    item: Option<Option<I::Item>>,
}

impl<I: Iterator<Item = (MemoryRange, T)>, T> PeekableSorted<I> {
    fn new(iter: impl IntoIterator<IntoIter = I>) -> Self {
        Self {
            iter: iter.into_iter(),
            item: None,
        }
    }

    fn peek_in_range_ensure_sorted(&mut self, pos: u64, msg: &str) -> Option<&(MemoryRange, T)> {
        loop {
            let r = self
                .item
                .get_or_insert_with(|| {
                    let r = self.iter.next()?;
                    assert!(r.0.start() >= pos, "{msg} not sorted");
                    Some(r)
                })
                .as_ref()?;
            if !r.0.is_empty() && r.0.end() > pos {
                return Some(self.item.as_ref().unwrap().as_ref().unwrap());
            }
            self.item = None;
        }
    }
}

impl<I: Iterator<Item = (MemoryRange, T)>, J: Iterator<Item = (MemoryRange, U)>, T: Clone, U: Clone>
    Iterator for RangeWalkIter<I, J>
{
    type Item = (MemoryRange, RangeWalkResult<T, U>);

    fn next(&mut self) -> Option<Self::Item> {
        if self.pos == MemoryRange::MAX_ADDRESS {
            return None;
        }
        let left = self.left.peek_in_range_ensure_sorted(self.pos, "left");
        let right = self.right.peek_in_range_ensure_sorted(self.pos, "right");
        let (end, c) = match (left, right) {
            (Some(&(left, ref t)), Some(&(right, ref u))) => {
                if self.pos < left.start() {
                    if self.pos < right.start() {
                        (left.start().min(right.start()), RangeWalkResult::Neither)
                    } else {
                        (
                            left.start().min(right.end()),
                            RangeWalkResult::Right(u.clone()),
                        )
                    }
                } else if self.pos < right.start() {
                    (
                        right.start().min(left.end()),
                        RangeWalkResult::Left(t.clone()),
                    )
                } else {
                    (
                        left.end().min(right.end()),
                        RangeWalkResult::Both(t.clone(), u.clone()),
                    )
                }
            }
            (Some(&(left, ref t)), None) => {
                if self.pos < left.start() {
                    (left.start, RangeWalkResult::Neither)
                } else {
                    (left.end(), RangeWalkResult::Left(t.clone()))
                }
            }
            (None, Some(&(right, ref u))) => {
                if self.pos < right.start() {
                    (right.start, RangeWalkResult::Neither)
                } else {
                    (right.end(), RangeWalkResult::Right(u.clone()))
                }
            }
            (None, None) => (MemoryRange::MAX_ADDRESS, RangeWalkResult::Neither),
        };
        let r = MemoryRange::new(self.pos..end);
        self.pos = end;
        Some((r, c))
    }
}

/// Takes a sequence of memory ranges, sorted by their start address, and
/// returns an iterator over the flattened ranges, where overlapping and
/// adjacent ranges are merged and deduplicated.
///
/// Panics if the input ranges are not sorted by their start address.
///
/// # Example
/// ```rust
/// # use memory_range::{flatten_ranges, MemoryRange};
/// let ranges = [
///     MemoryRange::new(0x1000..0x2000),
///     MemoryRange::new(0x2000..0x5000),
///     MemoryRange::new(0x4000..0x6000),
///     MemoryRange::new(0x5000..0x6000),
///     MemoryRange::new(0x8000..0x9000),
/// ];
/// let flattened = [
///     MemoryRange::new(0x1000..0x6000),
///     MemoryRange::new(0x8000..0x9000),
/// ];
/// assert!(flatten_ranges(ranges).eq(flattened));
/// ```
pub fn flatten_ranges(
    ranges: impl IntoIterator<Item = MemoryRange>,
) -> impl Iterator<Item = MemoryRange> {
    FlattenIter {
        iter: ranges.into_iter().peekable(),
    }
}

struct FlattenIter<I: Iterator> {
    iter: Peekable<I>,
}

impl<I: Iterator<Item = MemoryRange>> Iterator for FlattenIter<I> {
    type Item = MemoryRange;

    fn next(&mut self) -> Option<Self::Item> {
        let first = self.iter.next()?;
        let mut start = first.start();
        let mut end = first.end();
        while let Some(r) = self.iter.next_if(|r| {
            assert!(r.start() >= start, "ranges are not sorted");
            r.start() <= end
        }) {
            start = r.start();
            end = end.max(r.end());
        }
        Some(MemoryRange::new(first.start()..end))
    }
}

/// Similar to [`flatten_ranges`], but considers ranges non-equivalent if their
/// associated tags differ.
///
/// Panics if the input ranges are not sorted by their start address, or if
/// ranges overlap.
///
/// # Example
/// ```rust
/// # use memory_range::{merge_adjacent_ranges, MemoryRange};
///
/// #[derive(Clone, Copy, Debug, PartialEq, Eq)]
/// enum Color {
///    Red,
///    Blue,
/// }
///
/// let ranges = [
///     (MemoryRange::new(0x1000..0x2000), Color::Red),
///     (MemoryRange::new(0x2000..0x5000), Color::Red),
///     (MemoryRange::new(0x5000..0x6000), Color::Blue),
///     (MemoryRange::new(0x8000..0x9000), Color::Red),
/// ];
/// let flattened = [
///     (MemoryRange::new(0x1000..0x5000), Color::Red),
///     (MemoryRange::new(0x5000..0x6000), Color::Blue),
///     (MemoryRange::new(0x8000..0x9000), Color::Red),
/// ];
/// assert!(merge_adjacent_ranges(ranges).eq(flattened));
/// ```
pub fn merge_adjacent_ranges<T: PartialEq>(
    ranges: impl IntoIterator<Item = (MemoryRange, T)>,
) -> impl Iterator<Item = (MemoryRange, T)> {
    MergeAdjacentIter {
        iter: ranges.into_iter().peekable(),
    }
}

struct MergeAdjacentIter<I: Iterator> {
    iter: Peekable<I>,
}

impl<I: Iterator<Item = (MemoryRange, T)>, T: PartialEq> Iterator for MergeAdjacentIter<I> {
    type Item = (MemoryRange, T);

    fn next(&mut self) -> Option<Self::Item> {
        let (first, typ) = self.iter.next()?;
        let mut start = first.start();
        let mut end = first.end();
        while let Some((r, _t)) = self.iter.next_if(|(r, t)| {
            assert!(r.start() >= start, "ranges are not sorted");
            assert!(r.start() >= end, "ranges overlap");
            r.start() == end && &typ == t
        }) {
            start = r.start();
            end = end.max(r.end());
        }
        Some((MemoryRange::new(first.start()..end), typ))
    }
}

#[cfg(test)]
mod tests {
    extern crate alloc;
    use super::MemoryRange;
    use super::TWO_MB;
    use crate::AlignedSubranges;
    use crate::flatten_ranges;
    use crate::merge_adjacent_ranges;
    use crate::overlapping_ranges;
    use crate::subtract_ranges;
    use alloc::vec;
    use alloc::vec::Vec;

    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;

    #[test]
    fn test_align() {
        #[derive(Clone, Debug, PartialEq, Copy)]
        struct AlignedRangeResult {
            range: MemoryRange,
            page_size: u64,
        }

        let compare_with_base = |r1: Vec<MemoryRange>, base: u64, er: Vec<AlignedRangeResult>| {
            let result: Vec<_> = r1
                .iter()
                .flat_map(|range| AlignedSubranges::new(*range).with_offset(base))
                .collect();
            assert_eq!(result.len(), er.len());
            for (pos, range) in result.iter().enumerate() {
                assert_eq!(*range, er[pos].range);
                assert_eq!(range.alignment(base), er[pos].page_size);
            }
        };

        let compare = |r1: Vec<MemoryRange>, er: Vec<AlignedRangeResult>| {
            compare_with_base(r1, 0, er);
        };

        /// Shorthand to build a memory range.
        fn b_mr(start: u64, end: u64) -> MemoryRange {
            MemoryRange::new(start..end)
        }

        /// Shorthand to build an aligned range result.
        fn b_arr(range: MemoryRange, page_size: u64) -> AlignedRangeResult {
            AlignedRangeResult { range, page_size }
        }

        // [0, 4KB]
        let ram: Vec<_> = vec![b_mr(0x0, 4 * KB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 12)];

        compare(ram, expected_res);

        // [0, 2MB]
        let ram: Vec<_> = vec![b_mr(0x0, 2 * MB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 21)];
        compare(ram, expected_res);

        // [0, 1GB]
        let ram: Vec<_> = vec![b_mr(0x0, GB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 30)];
        compare(ram, expected_res);

        // [6MB, 12.004MB]
        let ram: Vec<_> = vec![b_mr(6 * MB, 12 * MB + 4 * KB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(6 * MB, 12 * MB), TWO_MB),
            b_arr(b_mr(12 * MB, 12 * MB + 4 * KB), 1 << 12),
        ];
        compare(ram, expected_res);

        // [5.4MB, 12.4MB]
        let ram: Vec<_> = vec![b_mr(5 * MB + 400 * KB, 12 * MB + 400 * KB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(5 * MB + 400 * KB, 6 * MB), 1 << 14),
            b_arr(b_mr(6 * MB, 12 * MB), TWO_MB),
            b_arr(b_mr(12 * MB, 12 * MB + 400 * KB), 1 << 14),
        ];
        compare(ram, expected_res);

        // [1GB+501MB, 3GB+503MB]
        let ram: Vec<_> = vec![b_mr(GB + 501 * MB, 3 * GB + 503 * MB)];
        let expected_res: Vec<_> = vec![
            b_arr(b_mr(GB + 501 * MB, GB + 502 * MB), 1 << 20),
            b_arr(b_mr(GB + 502 * MB, 2 * GB), 1 << 21),
            b_arr(b_mr(2 * GB, 3 * GB), 1 << 30),
            b_arr(b_mr(3 * GB, 3 * GB + 502 * MB), 1 << 21),
            b_arr(b_mr(3 * GB + 502 * MB, 3 * GB + 503 * MB), 1 << 20),
        ];
        compare(ram, expected_res);

        // [4.008MB, 6.008MB] with a base chosen so the range becomes 2MB-aligned
        let ram: Vec<_> = vec![b_mr(4 * MB + 8 * KB, 6 * MB + 8 * KB)];
        let base = 2 * MB - 8 * KB;
        let expected_res: Vec<_> = vec![b_arr(ram[0], TWO_MB)];
        compare_with_base(ram, base, expected_res);

        // [4.008MB, 6.008MB] without any base
        let ram: Vec<_> = vec![b_mr(4 * MB + 8 * KB, 6 * MB + 8 * KB)];
        let expected_res: Vec<_> = vec![b_arr(ram[0], 1 << 13)];
        compare_with_base(ram, 0, expected_res);
    }

    #[test]
    fn test_overlapping_ranges() {
        let left = [
            MemoryRange::new(0..4 * MB),
            MemoryRange::new(8 * MB..12 * MB),
            MemoryRange::new(12 * MB..12 * MB),
            MemoryRange::new(16 * MB..20 * MB),
            MemoryRange::new(24 * MB..32 * MB),
            MemoryRange::new(40 * MB..48 * MB),
        ];
        let right = [
            MemoryRange::new(2 * MB..6 * MB),
            MemoryRange::new(10 * MB..11 * MB),
            MemoryRange::new(11 * MB..11 * MB),
            MemoryRange::new(11 * MB..13 * MB),
            MemoryRange::new(15 * MB..22 * MB),
            MemoryRange::new(26 * MB..30 * MB),
        ];

        let result: Vec<_> = overlapping_ranges(left, right).collect();
        assert_eq!(
            result.as_slice(),
            &[
                MemoryRange::new(2 * MB..4 * MB),
                MemoryRange::new(10 * MB..11 * MB),
                MemoryRange::new(11 * MB..12 * MB),
                MemoryRange::new(16 * MB..20 * MB),
                MemoryRange::new(26 * MB..30 * MB),
            ]
        );
    }

    #[test]
    fn test_subtract_ranges() {
        let left = [
            MemoryRange::new(0..4 * MB),
            MemoryRange::new(8 * MB..12 * MB),
            MemoryRange::new(12 * MB..12 * MB),
            MemoryRange::new(16 * MB..20 * MB),
            MemoryRange::new(24 * MB..32 * MB),
            MemoryRange::new(40 * MB..48 * MB),
        ];
        let right = [
            MemoryRange::new(2 * MB..6 * MB),
            MemoryRange::new(10 * MB..11 * MB),
            MemoryRange::new(11 * MB..11 * MB),
            MemoryRange::new(11 * MB..13 * MB),
            MemoryRange::new(15 * MB..22 * MB),
            MemoryRange::new(26 * MB..30 * MB),
        ];

        let result: Vec<_> = subtract_ranges(left, right).collect();
        assert_eq!(
            result.as_slice(),
            &[
                MemoryRange::new(0..2 * MB),
                MemoryRange::new(8 * MB..10 * MB),
                MemoryRange::new(24 * MB..26 * MB),
                MemoryRange::new(30 * MB..32 * MB),
                MemoryRange::new(40 * MB..48 * MB),
            ]
        );
    }

    #[test]
    #[should_panic(expected = "left not sorted")]
    fn test_panic_unsorted_overlapping_left() {
        overlapping_ranges(
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(0..MB)],
            [MemoryRange::new(3 * MB..4 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "right not sorted")]
    fn test_panic_unsorted_overlapping_right() {
        overlapping_ranges(
            [
                MemoryRange::new(MB..2 * MB),
                MemoryRange::new(3 * MB..4 * MB),
            ],
            [MemoryRange::new(0..MB), MemoryRange::new(0..MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "left not sorted")]
    fn test_panic_unsorted_subtract_left() {
        subtract_ranges(
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(0..MB)],
            [MemoryRange::new(MB..2 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "right not sorted")]
    fn test_panic_unsorted_subtract_right() {
        subtract_ranges(
            [
                MemoryRange::new(MB..2 * MB),
                MemoryRange::new(3 * MB..4 * MB),
            ],
            [MemoryRange::new(MB..2 * MB), MemoryRange::new(MB..2 * MB)],
        )
        .for_each(|_| ());
    }

    #[test]
    fn test_aligned_subrange() {
        let test_cases = &[
            (0..0, MB, 0..0),
            (0..MB, MB, 0..MB),
            (4 * KB..MB + 4 * KB, MB, MB..MB),
            (MB..5 * MB, 2 * MB, 2 * MB..4 * MB),
        ];
        for (range, alignment, expected_aligned_range) in test_cases.iter().cloned() {
            assert_eq!(
                MemoryRange::new(range).aligned_subrange(alignment),
                MemoryRange::new(expected_aligned_range)
            );
        }
    }

    #[test]
    fn test_flatten_ranges() {
        let ranges =
            [0..4, 5..7, 6..11, 13..20, 20..25, 22..24, 35..36].map(MemoryRange::from_4k_gpn_range);
        let result = [0..4, 5..11, 13..25, 35..36].map(MemoryRange::from_4k_gpn_range);
        assert!(flatten_ranges(ranges).eq(result));
    }

    #[test]
    #[should_panic(expected = "ranges are not sorted")]
    fn test_flatten_ranges_not_sorted() {
        flatten_ranges([0..4, 5..7, 3..8].map(MemoryRange::from_4k_gpn_range)).for_each(|_| ());
    }

    #[test]
    fn test_merge_adjacent_ranges() {
        #[derive(Clone, Copy, PartialEq, Eq)]
        enum Color {
            Red,
            Blue,
        }

        let ranges = [0..4, 5..7, 7..11, 11..12, 13..20, 20..25, 35..36]
            .map(MemoryRange::from_4k_gpn_range)
            .into_iter()
            .zip([
                Color::Red,
                Color::Red,
                Color::Red,
                Color::Blue,
                Color::Red,
                Color::Red,
                Color::Blue,
            ]);
        let result = [0..4, 5..11, 11..12, 13..25, 35..36]
            .map(MemoryRange::from_4k_gpn_range)
            .into_iter()
            .zip([Color::Red, Color::Red, Color::Blue, Color::Red, Color::Blue]);
        assert!(merge_adjacent_ranges(ranges).eq(result));
    }

    #[test]
    #[should_panic(expected = "ranges are not sorted")]
    fn test_merge_adjacent_ranges_not_sorted() {
        merge_adjacent_ranges([0..4, 5..7, 3..8].map(|r| (MemoryRange::from_4k_gpn_range(r), ())))
            .for_each(|_| ());
    }

    #[test]
    #[should_panic(expected = "ranges overlap")]
    fn test_merge_adjacent_ranges_overlap() {
        merge_adjacent_ranges([0..6, 5..7, 9..12].map(|r| (MemoryRange::from_4k_gpn_range(r), ())))
            .for_each(|_| ());
    }
}