// openhcl_boot: bump_alloc.rs

//! A simple single-threaded bump allocator for the openhcl_boot environment.
//! Allocations are only permitted inside [`with_global_alloc`], and memory is
//! never reclaimed (`dealloc` is a no-op).

use crate::single_threaded::SingleThreaded;
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cell::RefCell;
use memory_range::MemoryRange;

/// The global bump allocator instance. When built for the minimal runtime
/// (`minimal_rt`), it is registered as the `#[global_allocator]`.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();

/// Tracks whether heap allocations are currently permitted.
#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Allocations have not been enabled yet, but may be.
    Allowed,
    /// Allocations are enabled.
    Enabled,
    /// Allocations have been permanently disabled.
    Disabled,
}

#[derive(Debug)]
pub struct Inner {
    start: *mut u8,
    next: *mut u8,
    end: *mut u8,
    allow_alloc: State,
    alloc_count: usize,
}

pub struct BumpAllocator {
    inner: SingleThreaded<RefCell<Inner>>,
}

impl BumpAllocator {
    pub const fn new() -> Self {
        BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: core::ptr::null_mut(),
                next: core::ptr::null_mut(),
                end: core::ptr::null_mut(),
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        }
    }

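    /// Initializes the allocator to hand out memory from `mem`.
    ///
    /// # Safety
    ///
    /// The caller must ensure, at minimum, that `mem` describes a valid,
    /// otherwise unused region of memory that the allocator may exclusively
    /// own for the rest of the program.
    ///
    /// A minimal usage sketch, where `free_start` and `free_end` are
    /// hypothetical placeholders for whatever free range the boot code has
    /// reserved:
    ///
    /// ```ignore
    /// // SAFETY: [free_start, free_end) is reserved for the allocator.
    /// unsafe { ALLOCATOR.init(MemoryRange::new(free_start..free_end)) };
    /// ```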
    pub unsafe fn init(&self, mem: MemoryRange) {
        let mut inner = self.inner.borrow_mut();
        assert!(
            inner.start.is_null(),
            "bump allocator memory range previously set {:#x?}",
            inner.start
        );

        inner.start = mem.start() as *mut u8;
        inner.next = mem.start() as *mut u8;
        inner.end = mem.end() as *mut u8;
    }

    /// Enables allocations. Panics if allocations are already enabled or have
    /// been permanently disabled.
    fn enable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();

        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => State::Enabled,
            State::Enabled => {
                panic!("allocations are already enabled");
            }
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be re-enabled");
            }
        };
    }

    /// Permanently disables allocations. Panics if allocations were never
    /// enabled or have already been disabled.
    fn disable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();
        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => panic!("allocations were never enabled"),
            State::Enabled => State::Disabled,
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be disabled again");
            }
        };
    }

    /// Logs how much of the allocator's memory range was used.
    fn log_stats(&self) {
        let inner = self.inner.borrow();

        // SAFETY: `start`, `next`, and `end` all point into the single memory
        // range provided to `init`, so the pointer offsets are valid.
        let (allocated, free) = unsafe {
            (
                inner.next.offset_from(inner.start),
                inner.end.offset_from(inner.next),
            )
        };

        log::info!(
            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
            allocated,
            inner.alloc_count,
            free
        );
    }
}

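/// Runs `f` with the global bump allocator enabled, then permanently disables
/// allocations again and logs usage statistics.
///
/// A minimal usage sketch; `parse_host_dt` is a hypothetical caller that needs
/// heap allocations while it runs:
///
/// ```ignore
/// let parsed = with_global_alloc(|| parse_host_dt(dt_bytes));
/// ```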
pub fn with_global_alloc<T>(f: impl FnOnce() -> T) -> T {
    ALLOCATOR.enable_alloc();
    let val = f();
    ALLOCATOR.disable_alloc();
    ALLOCATOR.log_stats();
    val
}

// SAFETY: memory returned by `alloc` comes from the exclusively owned range
// provided to `init`, satisfies the requested size and alignment, and is never
// handed out twice.
unsafe impl GlobalAlloc for BumpAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.borrow_mut();

        if inner.allow_alloc != State::Enabled {
            panic!("allocations are not allowed {:?}", inner.allow_alloc);
        }

        let align_offset = inner.next.align_offset(layout.align());
        let alloc_start = inner.next.wrapping_add(align_offset);
        let alloc_end = alloc_start.wrapping_add(layout.size());

        // The requested size wrapped around the address space.
        if alloc_end < alloc_start {
            return core::ptr::null_mut();
        }

        if alloc_end > inner.end {
            // Out of memory.
            core::ptr::null_mut()
        } else {
            inner.next = alloc_end;
            inner.alloc_count += 1;
            alloc_start
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // A bump allocator never frees memory; deallocation is a no-op.
    }
}

#[cfg(nightly)]
// SAFETY: allocations are forwarded to the `GlobalAlloc` implementation above.
unsafe impl core::alloc::Allocator for &BumpAllocator {
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        // SAFETY: the layout comes from the caller; a null return is
        // translated into `AllocError` below.
        let ptr = unsafe { self.alloc(layout) };
        if ptr.is_null() {
            Err(core::alloc::AllocError)
        } else {
            // SAFETY: `ptr` is non-null and points to at least `layout.size()`
            // bytes of freshly allocated memory.
            unsafe {
                Ok(core::ptr::NonNull::slice_from_raw_parts(
                    core::ptr::NonNull::new_unchecked(ptr),
                    layout.size(),
                ))
            }
        }
    }

    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        log::info!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}

#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alloc() {
        // Back the allocator with a 20-page buffer from the host heap.
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 20]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 20) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        unsafe {
            let ptr1 = allocator.alloc(Layout::from_size_align(100, 8).unwrap());
            *ptr1 = 42;
            assert_eq!(*ptr1, 42);

            let ptr2 = allocator.alloc(Layout::from_size_align(200, 16).unwrap());
            *ptr2 = 55;
            assert_eq!(*ptr2, 55);

            let ptr3 = allocator.alloc(Layout::from_size_align(300, 32).unwrap());
            *ptr3 = 77;
            assert_eq!(*ptr3, 77);
        }

        {
            // Exercise the nightly `Allocator` impl through a Vec, forcing it
            // to grow and reallocate.
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            for i in 0..4096 {
                vec.push(i as u8);
            }

            vec.resize(10000, 0);
        }

        unsafe {
            // A request larger than the remaining space must return null.
            let ptr4 = allocator.alloc(Layout::from_size_align(0x1000 * 20, 8).unwrap());
            assert!(ptr4.is_null());
        }

        // Reconstruct the box so the backing buffer is freed.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 20)) };

        allocator.log_stats();
    }
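
    // A sketch of an extra sanity check: pointers returned by `alloc` should
    // honor the requested alignment. It assumes the same nightly
    // `allocator_api` setup as `test_alloc` above.
    #[test]
    fn test_alignment() {
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 4]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 4) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        for align in [1usize, 2, 8, 64, 4096] {
            // SAFETY: the layout has a non-zero size, and the returned block
            // is only checked for null and alignment, never dereferenced.
            let ptr = unsafe { allocator.alloc(Layout::from_size_align(1, align).unwrap()) };
            assert!(!ptr.is_null());
            assert_eq!(ptr as usize % align, 0);
        }

        // Reconstruct the box so the backing buffer is freed.
        let _buf =
            unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 4)) };
    }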
}