openhcl_boot/host_params/dt/
bump_alloc.rs

1// Copyright (c) Microsoft Corporation.
2// Licensed under the MIT License.
3
4//! A simple bump allocator that can be used in the bootloader.
5//!
6//! Note that we only allow allocations in a small window for supporting
7//! mesh_protobuf. Any other attempts to allocate will result in a panic.
8
9use crate::boot_logger::log;
10use crate::single_threaded::SingleThreaded;
11use core::alloc::GlobalAlloc;
12use core::alloc::Layout;
13use core::cell::RefCell;
14use memory_range::MemoryRange;
15
16// Only enable the bump allocator when compiling with minimal_rt, as otherwise
17// it will override the global allocator in unit tests which is not what we
18// want.
/// The global bump allocator instance for the bootloader. Backing memory must
/// be provided via `BumpAllocator::init` before any allocation can succeed.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();
21
/// Allocation gating state for the bump allocator.
///
/// The lifecycle is strictly one-way: `Allowed` -> `Enabled` -> `Disabled`;
/// `enable_alloc`/`disable_alloc` panic on any other transition.
#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Allocations can be enabled via `enable_alloc`.
    Allowed,
    /// Allocations are currently enabled.
    Enabled,
    /// Allocations are disabled and cannot be enabled again.
    Disabled,
}
31
/// Interior state of the bump allocator, wrapped in
/// `SingleThreaded<RefCell<..>>` by `BumpAllocator` for interior mutability.
#[derive(Debug)]
pub struct Inner {
    // Base of the memory range handed to `init`; null until `init` runs.
    start: *mut u8,
    // Next free byte; bumped forward on every successful allocation.
    next: *mut u8,
    // One past the last usable byte of the backing range.
    end: *mut u8,
    // Gate controlling whether `alloc` is currently permitted.
    allow_alloc: State,
    // Count of successful allocations, reported by `log_stats`.
    alloc_count: usize,
}
40
/// A bump allocator backed by a single contiguous memory range.
///
/// Allocations only succeed between `enable_alloc` and `disable_alloc`, and
/// individual deallocations are no-ops: memory is only reclaimed wholesale
/// when the bootloader exits.
pub struct BumpAllocator {
    inner: SingleThreaded<RefCell<Inner>>,
}
44
45impl BumpAllocator {
46    pub const fn new() -> Self {
47        BumpAllocator {
48            inner: SingleThreaded(RefCell::new(Inner {
49                start: core::ptr::null_mut(),
50                next: core::ptr::null_mut(),
51                end: core::ptr::null_mut(),
52                allow_alloc: State::Allowed,
53                alloc_count: 0,
54            })),
55        }
56    }
57
58    /// Initialize the bump allocator with the specified memory range.
59    ///
60    /// # Safety
61    ///
62    /// The caller must guarantee that the memory range is both valid to
63    /// access via the current pagetable identity map, and that it is unused.
64    pub unsafe fn init(&self, mem: MemoryRange) {
65        let mut inner = self.inner.borrow_mut();
66        assert!(
67            inner.start.is_null(),
68            "bump allocator memory range previously set {:#x?}",
69            inner.start
70        );
71
72        inner.start = mem.start() as *mut u8;
73        inner.next = mem.start() as *mut u8;
74        inner.end = mem.end() as *mut u8;
75    }
76
77    /// Enable allocations. This panics if allocations were ever previously
78    /// enabled.
79    fn enable_alloc(&self) {
80        let mut inner = self.inner.borrow_mut();
81
82        inner.allow_alloc = match inner.allow_alloc {
83            State::Allowed => State::Enabled,
84            State::Enabled => {
85                panic!("allocations are already enabled");
86            }
87            State::Disabled => {
88                panic!("allocations were previously disabled and cannot be re-enabled");
89            }
90        };
91    }
92
93    /// Disable allocations. Panics if the allocator was not previously enabled.
94    fn disable_alloc(&self) {
95        let mut inner = self.inner.borrow_mut();
96        inner.allow_alloc = match inner.allow_alloc {
97            State::Allowed => panic!("allocations were never enabled"),
98            State::Enabled => State::Disabled,
99            State::Disabled => {
100                panic!("allocations were previously disabled and cannot be disabled again");
101            }
102        };
103    }
104
105    fn log_stats(&self) {
106        let inner = self.inner.borrow();
107
108        // SAFETY: The pointers are within the same original allocation,
109        // specified by init. They are u8 pointers, so there is no alignment
110        // requirement.
111        let (allocated, free) = unsafe {
112            (
113                inner.next.offset_from(inner.start),
114                inner.end.offset_from(inner.next),
115            )
116        };
117
118        log!(
119            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
120            allocated,
121            inner.alloc_count,
122            free
123        );
124    }
125}
126
127/// Run the provided closure with enabling the global bump allocator. This is
128/// only intended to be used for mesh_protobuf decode.
129///
130/// Note that if the global allocator was ever used before, this function will
131/// panic.
132pub fn with_global_alloc<T>(f: impl FnOnce() -> T) -> T {
133    ALLOCATOR.enable_alloc();
134    let val = f();
135    ALLOCATOR.disable_alloc();
136    ALLOCATOR.log_stats();
137    val
138}
139
140// SAFETY: The allocator points to a valid identity VA range via the
141// construction at init.
142unsafe impl GlobalAlloc for BumpAllocator {
143    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
144        let mut inner = self.inner.borrow_mut();
145
146        if inner.allow_alloc != State::Enabled {
147            panic!("allocations are not allowed {:?}", inner.allow_alloc);
148        }
149
150        let align_offset = inner.next.align_offset(layout.align());
151        let alloc_start = inner.next.wrapping_add(align_offset);
152        let alloc_end = alloc_start.wrapping_add(layout.size());
153
154        // If end overflowed this allocation is too large. If start overflowed,
155        // end will also overflow.
156        //
157        // Rust `Layout` guarantees that the size is not larger than `isize`,
158        // so it's not possible to wrap around twice.
159        if alloc_end < alloc_start {
160            return core::ptr::null_mut();
161        }
162
163        // TODO: renable allocation tracing when we support tracing levels via
164        // the log crate.
165
166        if alloc_end > inner.end {
167            core::ptr::null_mut() // out of memory
168        } else {
169            inner.next = alloc_end;
170            inner.alloc_count += 1;
171            alloc_start
172        }
173    }
174
175    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
176        // TODO: renable allocation tracing when we support tracing levels via
177        // the log crate.
178    }
179
180    // TODO: consider implementing realloc for the Vec grow case, which is the
181    // main usecase we see. This would mean supporting realloc if the allocation
182    // being realloced was the last one aka the tail.
183}
184
#[cfg(nightly)]
// SAFETY: Allocations are served from the valid identity-mapped VA range
// established at init time, exactly as for the `GlobalAlloc` impl.
unsafe impl core::alloc::Allocator for &BumpAllocator {
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        // SAFETY: the bump allocator places no extra preconditions on `alloc`.
        let raw = unsafe { self.alloc(layout) };
        match core::ptr::NonNull::new(raw) {
            // Null indicates the backing region is exhausted.
            None => Err(core::alloc::AllocError),
            Some(ptr) => Ok(core::ptr::NonNull::slice_from_raw_parts(
                ptr,
                layout.size(),
            )),
        }
    }

    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        log!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}
210
#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: run these tests with miri via
    // `RUSTFLAGS="--cfg nightly" cargo +nightly miri test -p openhcl_boot`
    #[test]
    fn test_alloc() {
        const SIZE: usize = 0x1000 * 20;

        // Back the allocator with a leaked heap buffer so miri can track it.
        let buffer: Box<[u8]> = Box::new([0; SIZE]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(SIZE) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        // Probe a few direct allocations with increasing alignments, writing
        // through each returned pointer to validate it.
        for (value, size, align) in [(42u8, 100, 8), (55u8, 200, 16), (77u8, 300, 32)] {
            unsafe {
                let ptr = allocator.alloc(Layout::from_size_align(size, align).unwrap());
                *ptr = value;
                assert_eq!(*ptr, value);
            }
        }

        {
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            // Push 4096 bytes, which should force a vec realloc.
            for i in 0..4096 {
                vec.push(i as u8);
            }

            // force an explicit resize to 10000 bytes
            vec.resize(10000, 0);
        }

        // Attempt to allocate a large chunk that is not available.
        unsafe {
            let huge = allocator.alloc(Layout::from_size_align(SIZE, 8).unwrap());
            assert!(huge.is_null());
        }

        // Recreate the box, then drop it so miri is satisfied.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, SIZE)) };

        allocator.log_stats();
    }
}