openhcl_boot/host_params/dt/bump_alloc.rs

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

//! A simple bump allocator that can be used in the bootloader.
//!
//! Note that we only allow allocations in a small window for supporting
//! mesh_protobuf. Any other attempts to allocate will result in a panic.
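//!
//! The intended call sequence looks roughly like this (an illustrative sketch,
//! not a verbatim caller; `range` stands for whatever memory the earlier boot
//! code reserved for the allocator):
//!
//! ```ignore
//! // SAFETY: `range` must be identity-mapped and otherwise unused.
//! unsafe { ALLOCATOR.init(range) };
//! ALLOCATOR.enable_alloc();
//! // ... run the mesh_protobuf decoding that needs heap allocations ...
//! ALLOCATOR.disable_alloc();
//! ALLOCATOR.log_stats();
//! ```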

use crate::boot_logger::log;
use crate::single_threaded::SingleThreaded;
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cell::RefCell;
use memory_range::MemoryRange;

// Only enable the bump allocator when compiling with minimal_rt, as otherwise
// it will override the global allocator in unit tests, which is not what we
// want.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();

/// Tracks whether the allocator currently accepts allocations. The transitions
/// are one-way: `Allowed` -> `Enabled` (via `enable_alloc`) -> `Disabled`
/// (via `disable_alloc`).
#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Allocations can be enabled via `enable_alloc`.
    Allowed,
    /// Allocations are currently enabled.
    Enabled,
    /// Allocations are disabled and cannot be enabled again.
    Disabled,
}

#[derive(Debug)]
pub struct Inner {
    /// Base of the memory range handed to `init`.
    start: *mut u8,
    /// Next free byte; advances with every allocation.
    next: *mut u8,
    /// Exclusive end of the usable range.
    end: *mut u8,
    /// Whether allocations are currently permitted.
    allow_alloc: State,
    /// Number of allocations served, reported by `log_stats`.
    alloc_count: usize,
}

/// A bump allocator over a fixed, identity-mapped memory range provided via
/// `init`. Interior mutability goes through `SingleThreaded<RefCell<_>>`,
/// which is sound here because the bootloader runs single threaded.
pub struct BumpAllocator {
    inner: SingleThreaded<RefCell<Inner>>,
}

impl BumpAllocator {
    pub const fn new() -> Self {
        BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: core::ptr::null_mut(),
                next: core::ptr::null_mut(),
                end: core::ptr::null_mut(),
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        }
    }

    /// Initialize the bump allocator with the specified memory range.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the memory range is valid to access via
    /// the current pagetable identity map and that it is unused.
    pub unsafe fn init(&self, mem: MemoryRange) {
        let mut inner = self.inner.borrow_mut();
        assert!(
            inner.start.is_null(),
            "bump allocator memory range previously set {:#x?}",
            inner.start
        );

        inner.start = mem.start() as *mut u8;
        inner.next = mem.start() as *mut u8;
        inner.end = mem.end() as *mut u8;
    }

    /// Enable allocations. This panics if allocations were ever previously
    /// enabled.
    pub fn enable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();

        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => State::Enabled,
            State::Enabled => {
                panic!("allocations are already enabled");
            }
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be re-enabled");
            }
        };
    }

    /// Disable allocations. Panics if the allocator was not previously enabled.
    pub fn disable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();
        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => panic!("allocations were never enabled"),
            State::Enabled => State::Disabled,
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be disabled again");
            }
        };
    }

    /// Log the allocator's usage statistics via the boot logger.
    pub fn log_stats(&self) {
        let inner = self.inner.borrow();

        // SAFETY: The pointers are within the same original allocation,
        // specified by init. They are u8 pointers, so there is no alignment
        // requirement.
        let (allocated, free) = unsafe {
            (
                inner.next.offset_from(inner.start),
                inner.end.offset_from(inner.next),
            )
        };

        log!(
            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
            allocated,
            inner.alloc_count,
            free
        );
    }
}

// SAFETY: The allocator points to a valid identity VA range via the
// construction at init.
unsafe impl GlobalAlloc for BumpAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.borrow_mut();

        if inner.allow_alloc != State::Enabled {
            panic!("allocations are not allowed {:?}", inner.allow_alloc);
        }

        let align_offset = inner.next.align_offset(layout.align());
        let alloc_start = inner.next.wrapping_add(align_offset);
        let alloc_end = alloc_start.wrapping_add(layout.size());
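        // Worked example (purely illustrative): with `next` at 0x1003 and an
        // 8-byte-aligned, 16-byte layout, `align_offset` is 5, so `alloc_start`
        // is 0x1008 and `alloc_end` is 0x1018.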

        // If end overflowed, this allocation is too large. If start overflowed,
        // end will also overflow.
        //
        // Rust `Layout` guarantees that the size is not larger than `isize`,
        // so it's not possible to wrap around twice.
        if alloc_end < alloc_start {
            return core::ptr::null_mut();
        }

        // TODO: re-enable allocation tracing when we support tracing levels via
        // the log crate.

        if alloc_end > inner.end {
            core::ptr::null_mut() // out of memory
        } else {
            inner.next = alloc_end;
            inner.alloc_count += 1;
            alloc_start
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // Individual frees are intentionally a no-op: a bump allocator only
        // reclaims memory by resetting the whole arena, which the bootloader
        // never does.
        //
        // TODO: re-enable allocation tracing when we support tracing levels via
        // the log crate.
    }

    // TODO: consider implementing realloc for the Vec grow case, which is the
    // main use case we see. This would mean supporting realloc only when the
    // allocation being realloc'd is the last one, i.e. the tail.
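    //
    // A minimal sketch of that idea (not implemented here): in `realloc`, check
    // whether `ptr.add(layout.size()) == inner.next`; if so, the allocation is
    // the tail and can grow in place by advancing `inner.next` (after
    // re-checking against `inner.end`); otherwise fall back to the default
    // allocate-copy-free path.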
}

#[cfg(nightly)]
// SAFETY: The allocator points to a valid identity VA range via the
// construction at init, the same as for `GlobalAlloc`.
unsafe impl core::alloc::Allocator for &BumpAllocator {
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        let ptr = unsafe { self.alloc(layout) };
        if ptr.is_null() {
            Err(core::alloc::AllocError)
        } else {
            unsafe {
                Ok(core::ptr::NonNull::slice_from_raw_parts(
                    core::ptr::NonNull::new_unchecked(ptr),
                    layout.size(),
                ))
            }
        }
    }

    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        log!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}
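
// NOTE: the `Allocator` impl above lets nightly-only constructors such as
// `Vec::new_in(&allocator)` use the bump allocator directly; the unit test
// below relies on exactly that.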

#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: run these tests with miri via
    // `RUSTFLAGS="--cfg nightly" cargo +nightly miri test -p openhcl_boot`
    #[test]
    fn test_alloc() {
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 20]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 20) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        unsafe {
            let ptr1 = allocator.alloc(Layout::from_size_align(100, 8).unwrap());
            *ptr1 = 42;
            assert_eq!(*ptr1, 42);

            let ptr2 = allocator.alloc(Layout::from_size_align(200, 16).unwrap());
            *ptr2 = 55;
            assert_eq!(*ptr2, 55);

            let ptr3 = allocator.alloc(Layout::from_size_align(300, 32).unwrap());
            *ptr3 = 77;
            assert_eq!(*ptr3, 77);
        }

        {
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            // Push 4096 bytes, which should force a vec realloc.
            for i in 0..4096 {
                vec.push(i as u8);
            }

            // Force an explicit resize to 10000 bytes.
            vec.resize(10000, 0);
        }
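
        // Dropping the vec above calls `deallocate`, which only logs; a bump
        // allocator never hands back individual allocations, so those bytes
        // remain counted by `log_stats` below.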

        // Attempt to allocate a large chunk that is not available.
        unsafe {
            let ptr4 = allocator.alloc(Layout::from_size_align(0x1000 * 20, 8).unwrap());
            assert!(ptr4.is_null());
        }

        // Recreate the box, then drop it so miri is satisfied.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 20)) };

        allocator.log_stats();
    }
}