openhcl_boot/host_params/dt/
bump_alloc.rs

1// Copyright (c) Microsoft Corporation.
2// Licensed under the MIT License.
3
4//! A simple bump allocator that can be used in the bootloader.
5//!
6//! Note that we only allow allocations in a small window for supporting
7//! mesh_protobuf. Any other attempts to allocate will result in a panic.
8
9use crate::single_threaded::SingleThreaded;
10use core::alloc::GlobalAlloc;
11use core::alloc::Layout;
12use core::cell::RefCell;
13use memory_range::MemoryRange;
14
// Only enable the bump allocator when compiling with minimal_rt, as otherwise
// it will override the global allocator in unit tests which is not what we
// want.
//
// Note: the allocator starts with null backing pointers; `init` must be
// called with a memory range before any allocation can succeed.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();
20
/// One-shot lifecycle of the allocator's permission to allocate.
///
/// The only legal transition sequence is `Allowed` -> `Enabled` ->
/// `Disabled`; `enable_alloc` and `disable_alloc` panic on any other
/// transition, so allocation can be turned on at most once.
#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Allocations can be enabled via `enable_alloc`.
    Allowed,
    /// Allocations are currently enabled.
    Enabled,
    /// Allocations are disabled and cannot be enabled again.
    Disabled,
}
30
/// Mutable state backing [`BumpAllocator`].
#[derive(Debug)]
pub struct Inner {
    /// Start of the backing memory range; null until `init` is called.
    start: *mut u8,
    /// Next free byte; bumped forward on every successful allocation.
    next: *mut u8,
    /// End of the backing memory range (exclusive).
    end: *mut u8,
    /// One-shot enable/disable state, checked on every `alloc`.
    allow_alloc: State,
    /// Number of successful allocations, reported by `log_stats`.
    alloc_count: usize,
}
39
/// A bump allocator handing out memory from a single range supplied via
/// `init`. Interior mutability is provided by `RefCell`, wrapped in
/// `SingleThreaded` so the type can live in a `static` without sync
/// primitives (the bootloader is single-threaded).
pub struct BumpAllocator {
    inner: SingleThreaded<RefCell<Inner>>,
}
43
44impl BumpAllocator {
45    pub const fn new() -> Self {
46        BumpAllocator {
47            inner: SingleThreaded(RefCell::new(Inner {
48                start: core::ptr::null_mut(),
49                next: core::ptr::null_mut(),
50                end: core::ptr::null_mut(),
51                allow_alloc: State::Allowed,
52                alloc_count: 0,
53            })),
54        }
55    }
56
57    /// Initialize the bump allocator with the specified memory range.
58    ///
59    /// # Safety
60    ///
61    /// The caller must guarantee that the memory range is both valid to
62    /// access via the current pagetable identity map, and that it is unused.
63    pub unsafe fn init(&self, mem: MemoryRange) {
64        let mut inner = self.inner.borrow_mut();
65        assert!(
66            inner.start.is_null(),
67            "bump allocator memory range previously set {:#x?}",
68            inner.start
69        );
70
71        inner.start = mem.start() as *mut u8;
72        inner.next = mem.start() as *mut u8;
73        inner.end = mem.end() as *mut u8;
74    }
75
76    /// Enable allocations. This panics if allocations were ever previously
77    /// enabled.
78    fn enable_alloc(&self) {
79        let mut inner = self.inner.borrow_mut();
80
81        inner.allow_alloc = match inner.allow_alloc {
82            State::Allowed => State::Enabled,
83            State::Enabled => {
84                panic!("allocations are already enabled");
85            }
86            State::Disabled => {
87                panic!("allocations were previously disabled and cannot be re-enabled");
88            }
89        };
90    }
91
92    /// Disable allocations. Panics if the allocator was not previously enabled.
93    fn disable_alloc(&self) {
94        let mut inner = self.inner.borrow_mut();
95        inner.allow_alloc = match inner.allow_alloc {
96            State::Allowed => panic!("allocations were never enabled"),
97            State::Enabled => State::Disabled,
98            State::Disabled => {
99                panic!("allocations were previously disabled and cannot be disabled again");
100            }
101        };
102    }
103
104    fn log_stats(&self) {
105        let inner = self.inner.borrow();
106
107        // SAFETY: The pointers are within the same original allocation,
108        // specified by init. They are u8 pointers, so there is no alignment
109        // requirement.
110        let (allocated, free) = unsafe {
111            (
112                inner.next.offset_from(inner.start),
113                inner.end.offset_from(inner.next),
114            )
115        };
116
117        log::info!(
118            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
119            allocated,
120            inner.alloc_count,
121            free
122        );
123    }
124}
125
126/// Run the provided closure with enabling the global bump allocator. This is
127/// only intended to be used for mesh_protobuf decode.
128///
129/// Note that if the global allocator was ever used before, this function will
130/// panic.
131pub fn with_global_alloc<T>(f: impl FnOnce() -> T) -> T {
132    ALLOCATOR.enable_alloc();
133    let val = f();
134    ALLOCATOR.disable_alloc();
135    ALLOCATOR.log_stats();
136    val
137}
138
139// SAFETY: The allocator points to a valid identity VA range via the
140// construction at init.
141unsafe impl GlobalAlloc for BumpAllocator {
142    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
143        let mut inner = self.inner.borrow_mut();
144
145        if inner.allow_alloc != State::Enabled {
146            panic!("allocations are not allowed {:?}", inner.allow_alloc);
147        }
148
149        let align_offset = inner.next.align_offset(layout.align());
150        let alloc_start = inner.next.wrapping_add(align_offset);
151        let alloc_end = alloc_start.wrapping_add(layout.size());
152
153        // If end overflowed this allocation is too large. If start overflowed,
154        // end will also overflow.
155        //
156        // Rust `Layout` guarantees that the size is not larger than `isize`,
157        // so it's not possible to wrap around twice.
158        if alloc_end < alloc_start {
159            return core::ptr::null_mut();
160        }
161
162        // TODO: renable allocation tracing when we support tracing levels via
163        // the log crate.
164
165        if alloc_end > inner.end {
166            core::ptr::null_mut() // out of memory
167        } else {
168            inner.next = alloc_end;
169            inner.alloc_count += 1;
170            alloc_start
171        }
172    }
173
174    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
175        // TODO: renable allocation tracing when we support tracing levels via
176        // the log crate.
177    }
178
179    // TODO: consider implementing realloc for the Vec grow case, which is the
180    // main usecase we see. This would mean supporting realloc if the allocation
181    // being realloced was the last one aka the tail.
182}
183
#[cfg(nightly)]
// SAFETY: The allocator points to a valid identity VA range via the
// construction at init, the same as for `GlobalAlloc`.
unsafe impl core::alloc::Allocator for &BumpAllocator {
    /// Allocate via the bump allocator, translating the null pointer that
    /// `GlobalAlloc::alloc` returns on failure into `AllocError`.
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        // SAFETY: `alloc` validates allocator state itself and has no
        // additional caller preconditions here.
        let ptr = unsafe { self.alloc(layout) };
        if ptr.is_null() {
            Err(core::alloc::AllocError)
        } else {
            // SAFETY: `ptr` was just checked to be non-null.
            unsafe {
                Ok(core::ptr::NonNull::slice_from_raw_parts(
                    core::ptr::NonNull::new_unchecked(ptr),
                    layout.size(),
                ))
            }
        }
    }

    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        // Deallocation is a no-op for a bump allocator; just log the call.
        // Use `log::info!` like `log_stats` does: the previous bare `log!`
        // invocation does not resolve (no `log` macro import in this file,
        // and the log crate's `log!` requires a level argument).
        log::info!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}
209
#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: run these tests with miri via
    // `RUSTFLAGS="--cfg nightly" cargo +nightly miri test -p openhcl_boot`
    #[test]
    fn test_alloc() {
        // Build a 20-page buffer on the host heap and hand its raw pointer
        // to the bump allocator. `into_raw` releases Box ownership here; it
        // is reclaimed via `from_raw` at the bottom so miri sees no leak.
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 20]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 20) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        // Raw GlobalAlloc calls with increasing alignment; write and read
        // back through each pointer so miri validates provenance and
        // writability of the returned memory.
        unsafe {
            let ptr1 = allocator.alloc(Layout::from_size_align(100, 8).unwrap());
            *ptr1 = 42;
            assert_eq!(*ptr1, 42);

            let ptr2 = allocator.alloc(Layout::from_size_align(200, 16).unwrap());
            *ptr2 = 55;
            assert_eq!(*ptr2, 55);

            let ptr3 = allocator.alloc(Layout::from_size_align(300, 32).unwrap());
            *ptr3 = 77;
            assert_eq!(*ptr3, 77);
        }

        // Exercise the nightly `Allocator` impl through Vec, in a scope so
        // the Vec is dropped (dropping only logs; memory is never reclaimed).
        {
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            // Push 4096 bytes, which should force a vec realloc.
            for i in 0..4096 {
                vec.push(i as u8);
            }

            // force an explicit resize to 10000 bytes
            vec.resize(10000, 0);
        }

        // Attempt to allocate a large chunk that is not available.
        unsafe {
            let ptr4 = allocator.alloc(Layout::from_size_align(0x1000 * 20, 8).unwrap());
            assert!(ptr4.is_null());
        }

        // Recreate the box, then drop it so miri is satisfied.
        // NOTE: `_buf` stays alive until the end of the function, so the
        // `log_stats` call below still reads in-bounds pointers.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 20)) };

        allocator.log_stats();
    }
}