// openhcl_boot/host_params/dt/bump_alloc.rs
use crate::boot_logger::log;
10use crate::single_threaded::SingleThreaded;
11use core::alloc::GlobalAlloc;
12use core::alloc::Layout;
13use core::cell::RefCell;
14use memory_range::MemoryRange;
15
// Registered as the program's global allocator only in `minimal_rt` builds;
// in other configurations (e.g. host-side unit tests) it is an ordinary static.
#[cfg_attr(minimal_rt, global_allocator)]
pub static ALLOCATOR: BumpAllocator = BumpAllocator::new();
21
/// Allocation lifecycle gate. Transitions are one-way:
/// `Allowed -> Enabled -> Disabled`; any other transition panics
/// (see `enable_alloc` / `disable_alloc`).
#[derive(Debug, PartialEq, Eq)]
enum State {
    /// Initial state: allocation has not been enabled yet.
    Allowed,
    /// Allocations are currently permitted.
    Enabled,
    /// Allocations were turned off and can never be re-enabled.
    Disabled,
}
31
/// Mutable state of the bump allocator.
#[derive(Debug)]
pub struct Inner {
    // Start of the backing memory range; null until `init` is called.
    start: *mut u8,
    // Next free byte; bumped forward on every successful allocation.
    next: *mut u8,
    // End (one past the last usable byte) of the backing range.
    end: *mut u8,
    // Lifecycle gate controlling whether `alloc` may proceed.
    allow_alloc: State,
    // Count of successful allocations, reported by `log_stats`.
    alloc_count: usize,
}
40
/// A bump allocator over a single contiguous memory range: allocation only
/// advances a cursor, and `dealloc` never reclaims memory.
pub struct BumpAllocator {
    // Interior mutability via `RefCell`; `SingleThreaded` presumably asserts
    // the single-CPU boot environment — NOTE(review): confirm its contract.
    inner: SingleThreaded<RefCell<Inner>>,
}
44
impl BumpAllocator {
    /// Creates an uninitialized allocator. `init` must supply a backing
    /// memory range before any allocation can succeed.
    pub const fn new() -> Self {
        BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: core::ptr::null_mut(),
                next: core::ptr::null_mut(),
                end: core::ptr::null_mut(),
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        }
    }

    /// Points the allocator at its backing memory range.
    ///
    /// Panics if a range was already installed.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `mem` describes valid, unused memory
    /// that this allocator exclusively owns for its entire lifetime.
    pub unsafe fn init(&self, mem: MemoryRange) {
        let mut inner = self.inner.borrow_mut();
        // `init` may only run once: a non-null `start` means a range is
        // already installed.
        assert!(
            inner.start.is_null(),
            "bump allocator memory range previously set {:#x?}",
            inner.start
        );

        inner.start = mem.start() as *mut u8;
        inner.next = mem.start() as *mut u8;
        inner.end = mem.end() as *mut u8;
    }

    /// Enables allocations. Panics unless the allocator is still in the
    /// initial `Allowed` state (double-enable and re-enable after disable
    /// are bugs).
    pub fn enable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();

        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => State::Enabled,
            State::Enabled => {
                panic!("allocations are already enabled");
            }
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be re-enabled");
            }
        };
    }

    /// Permanently disables allocations. Panics unless allocations are
    /// currently enabled; this transition is one-way.
    pub fn disable_alloc(&self) {
        let mut inner = self.inner.borrow_mut();
        inner.allow_alloc = match inner.allow_alloc {
            State::Allowed => panic!("allocations were never enabled"),
            State::Enabled => State::Disabled,
            State::Disabled => {
                panic!("allocations were previously disabled and cannot be disabled again");
            }
        };
    }

    /// Logs total bytes allocated, the allocation count, and the bytes
    /// remaining in the backing range.
    pub fn log_stats(&self) {
        let inner = self.inner.borrow();

        // SAFETY: `start <= next <= end` all lie within the one backing
        // range set by `init` (before `init` all three are null, where a
        // zero distance is still fine), so `offset_from` is in-bounds.
        let (allocated, free) = unsafe {
            (
                inner.next.offset_from(inner.start),
                inner.end.offset_from(inner.next),
            )
        };

        log!(
            "Bump allocator: allocated {} bytes in {} allocations ({} bytes free)",
            allocated,
            inner.alloc_count,
            free
        );
    }
}
126
127unsafe impl GlobalAlloc for BumpAllocator {
130 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
131 let mut inner = self.inner.borrow_mut();
132
133 if inner.allow_alloc != State::Enabled {
134 panic!("allocations are not allowed {:?}", inner.allow_alloc);
135 }
136
137 let align_offset = inner.next.align_offset(layout.align());
138 let alloc_start = inner.next.wrapping_add(align_offset);
139 let alloc_end = alloc_start.wrapping_add(layout.size());
140
141 if alloc_end < alloc_start {
147 return core::ptr::null_mut();
148 }
149
150 if alloc_end > inner.end {
154 core::ptr::null_mut() } else {
156 inner.next = alloc_end;
157 inner.alloc_count += 1;
158 alloc_start
159 }
160 }
161
162 unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
163 }
166
167 }
171
// SAFETY: delegates to the `GlobalAlloc` implementation for this type, which
// returns either null (mapped to `AllocError` here) or a pointer that is
// properly aligned and valid for `layout.size()` bytes.
#[cfg(nightly)]
unsafe impl core::alloc::Allocator for &BumpAllocator {
    /// Allocates through the bump allocator, converting a null result into
    /// [`core::alloc::AllocError`].
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        // SAFETY: `GlobalAlloc::alloc` is called with the caller-provided
        // layout; no additional preconditions apply here.
        let raw = unsafe { self.alloc(layout) };
        match core::ptr::NonNull::new(raw) {
            None => Err(core::alloc::AllocError),
            Some(base) => Ok(core::ptr::NonNull::slice_from_raw_parts(
                base,
                layout.size(),
            )),
        }
    }

    /// The bump allocator never frees; record the ignored request in the log.
    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, layout: Layout) {
        log!("deallocate called on {:#x?} of size {}", ptr, layout.size());
    }
}
197
// Nightly-only: the `Vec::new_in` portion exercises the unstable
// `core::alloc::Allocator` implementation above.
#[cfg(nightly)]
#[cfg(test)]
mod tests {
    use super::*;

    /// End-to-end smoke test: raw `GlobalAlloc` allocations, `Allocator`-based
    /// `Vec` growth, an intentional OOM, and the stats logger.
    #[test]
    fn test_alloc() {
        // Build a 20-page heap on the host heap and hand its raw range to a
        // hand-constructed allocator (bypassing `init`).
        let buffer: Box<[u8]> = Box::new([0; 0x1000 * 20]);
        let addr = Box::into_raw(buffer) as *mut u8;
        let allocator = BumpAllocator {
            inner: SingleThreaded(RefCell::new(Inner {
                start: addr,
                next: addr,
                end: unsafe { addr.add(0x1000 * 20) },
                allow_alloc: State::Allowed,
                alloc_count: 0,
            })),
        };
        allocator.enable_alloc();

        // Raw allocations with increasing alignment; each must be writable.
        unsafe {
            let ptr1 = allocator.alloc(Layout::from_size_align(100, 8).unwrap());
            *ptr1 = 42;
            assert_eq!(*ptr1, 42);

            let ptr2 = allocator.alloc(Layout::from_size_align(200, 16).unwrap());
            *ptr2 = 55;
            assert_eq!(*ptr2, 55);

            let ptr3 = allocator.alloc(Layout::from_size_align(300, 32).unwrap());
            *ptr3 = 77;
            assert_eq!(*ptr3, 77);
        }

        // Grow a Vec through the `Allocator` impl; pushes past capacity force
        // reallocations from the bump region. Dropping it does not reclaim.
        {
            let mut vec: Vec<u8, &BumpAllocator> = Vec::new_in(&allocator);

            for i in 0..4096 {
                vec.push(i as u8);
            }

            vec.resize(10000, 0);
        }

        // Asking for the entire region after prior allocations must fail.
        unsafe {
            let ptr4 = allocator.alloc(Layout::from_size_align(0x1000 * 20, 8).unwrap());
            assert!(ptr4.is_null());
        }

        // Reconstitute the Box so the backing memory is freed on drop.
        let _buf = unsafe { Box::from_raw(core::ptr::slice_from_raw_parts_mut(addr, 0x1000 * 20)) };

        allocator.log_stats();
    }
}