openhcl_boot/host_params/dt/bump_alloc.rs

use crate::boot_logger::log;
use crate::single_threaded::SingleThreaded;
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cell::RefCell;
use memory_range::MemoryRange;

/// The global bump allocator used by the boot shim.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();

#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Allocations are not yet permitted, but may be enabled.
    Allowed,
    /// Allocations are permitted.
    Enabled,
    /// Allocations are permanently disabled.
    Disabled,
}

#[derive(Debug)]
pub struct Inner {
    /// Base of the memory range owned by the allocator.
    start: *mut u8,
    /// Next free address to hand out.
    next: *mut u8,
    /// One past the last usable address.
    end: *mut u8,
    /// Whether allocations are currently permitted.
    allow_alloc: State,
    /// The number of allocations performed, tracked for logging.
    alloc_count: usize,
}

pub struct BumpAllocator {
    inner: SingleThreaded<RefCell<Inner>>,
}

impl BumpAllocator {
    pub const fn new() -> Self {
        BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: core::ptr::null_mut(),
                next: core::ptr::null_mut(),
                end: core::ptr::null_mut(),
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        }
    }

    /// Initializes the allocator to hand out memory from `mem`.
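    ///
    /// A minimal usage sketch, assuming `free_range` is a `MemoryRange` that
    /// the boot shim exclusively owns (`free_range` is illustrative, not
    /// part of this module):
    ///
    /// ```ignore
    /// // SAFETY: nothing else reads or writes `free_range`.
    /// unsafe { ALLOCATOR.init(free_range) };
    /// ```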
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `mem` is a valid, unused region of
    /// memory that the allocator may exclusively own from this point on.
    pub unsafe fn init(&self, mem: MemoryRange) {
        let mut inner = self.inner.borrow_mut();
        assert!(
            inner.start.is_null(),
            "bump allocator memory range previously set {:#x?}",
            inner.start
        );

        inner.start = mem.start() as *mut u8;
        inner.next = mem.start() as *mut u8;
        inner.end = mem.end() as *mut u8;
    }

    /// Enables allocations. Panics if allocations were already enabled or
    /// have been permanently disabled.
    fn enable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();

        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => State::Enabled,
            State::Enabled => {
                panic!("allocations are already enabled");
            }
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be re-enabled");
            }
        };
    }

    /// Permanently disables allocations. Panics unless allocations are
    /// currently enabled.
    fn disable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();
        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => panic!("allocations were never enabled"),
            State::Enabled => State::Disabled,
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be disabled again");
            }
        };
    }

    fn log_stats(&self) {
        let inner = self.inner.borrow();

        // SAFETY: `start`, `next`, and `end` are all derived from the single
        // memory range passed to `init`, so computing their offsets from one
        // another is valid.
        let (allocated, free) = unsafe {
            (
                inner.next.offset_from(inner.start),
                inner.end.offset_from(inner.next),
            )
        };

        log!(
            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
            allocated,
            inner.alloc_count,
            free
        );
    }
}

/// Runs `f` with the global bump allocator enabled, then permanently
/// disables allocations and logs usage statistics.
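///
/// A hypothetical usage sketch (`parse_host_dt` and `raw_dt` are
/// illustrative, not part of this module):
///
/// ```ignore
/// let parsed = with_global_alloc(|| parse_host_dt(raw_dt));
/// ```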
pub fn with_global_alloc<T>(f: impl FnOnce() -> T) -> T {
    ALLOCATOR.enable_alloc();
    let val = f();
    ALLOCATOR.disable_alloc();
    ALLOCATOR.log_stats();
    val
}

// SAFETY: memory returned by `alloc` is carved out of the exclusively owned
// range provided to `init` and is never handed out twice.
unsafe impl GlobalAlloc for BumpAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.borrow_mut();

        if inner.allow_alloc != State::Enabled {
            panic!("allocations are not allowed {:?}", inner.allow_alloc);
        }

        let align_offset = inner.next.align_offset(layout.align());
        let alloc_start = inner.next.wrapping_add(align_offset);
        let alloc_end = alloc_start.wrapping_add(layout.size());

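        // A worked sketch of the bump math above, with illustrative numbers:
        // if `next` is 0x1001 and the request is
        // `Layout::from_size_align(8, 8)`, then `align_offset` is 7, so
        // `alloc_start` is 0x1008 and `alloc_end` is 0x1010; `next` then
        // advances to 0x1010 for the next allocation.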
        // The size calculation overflowed, so fail the allocation.
        if alloc_end < alloc_start {
            return core::ptr::null_mut();
        }

        if alloc_end > inner.end {
            // Not enough memory remains to satisfy this request.
            core::ptr::null_mut()
        } else {
            inner.next = alloc_end;
            inner.alloc_count += 1;
            alloc_start
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // A bump allocator never reclaims memory, so deallocation is a no-op.
    }
}

#[cfg(nightly)]
// SAFETY: `allocate` returns valid, exclusively owned blocks carved out of
// the range provided to `init`.
unsafe impl core::alloc::Allocator for &BumpAllocator {
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        // SAFETY: `layout` is valid, and this allocator imposes no further
        // preconditions on `GlobalAlloc::alloc`.
        let ptr = unsafe { self.alloc(layout) };
        if ptr.is_null() {
            Err(core::alloc::AllocError)
        } else {
            // SAFETY: `ptr` is non-null and points to at least
            // `layout.size()` usable bytes.
            unsafe {
                Ok(core::ptr::NonNull::slice_from_raw_parts(
                    core::ptr::NonNull::new_unchecked(ptr),
                    layout.size(),
                ))
            }
        }
    }

    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        // Nothing to free in a bump allocator; just log the call.
        log!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}
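
// A minimal sketch of using the nightly `Allocator` impl (mirrors the test
// below; requires the unstable `allocator_api` feature, with allocations
// enabled first):
//
//     let mut v: Vec<u8, &BumpAllocator> = Vec::new_in(&ALLOCATOR);
//     v.push(1);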

#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alloc() {
        // Back the allocator with a 20-page buffer from the test harness's
        // global allocator.
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 20]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 20) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        unsafe {
            let ptr1 = allocator.alloc(Layout::from_size_align(100, 8).unwrap());
            *ptr1 = 42;
            assert_eq!(*ptr1, 42);

            let ptr2 = allocator.alloc(Layout::from_size_align(200, 16).unwrap());
            *ptr2 = 55;
            assert_eq!(*ptr2, 55);

            let ptr3 = allocator.alloc(Layout::from_size_align(300, 32).unwrap());
            *ptr3 = 77;
            assert_eq!(*ptr3, 77);
        }

        {
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            // Push enough elements to force the vec to grow several times.
            for i in 0..4096 {
                vec.push(i as u8);
            }

            // Growing past the current capacity allocates a fresh block.
            vec.resize(10000, 0);
        }

        // A request larger than the entire backing range must fail.
        unsafe {
            let ptr4 = allocator.alloc(Layout::from_size_align(0x1000 * 20, 8).unwrap());
            assert!(ptr4.is_null());
        }

        // Reassemble the Box so the backing buffer is freed on drop.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 20)) };

        allocator.log_stats();
    }
}