use crate::spec;
use core::marker::PhantomData;
use core::mem::size_of;
use core::mem::size_of_val;
use thiserror::Error;
use zerocopy::FromZeros;
use zerocopy::IntoBytes;

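/// Builds a flattened device tree (FDT) blob directly into a caller-provided
/// byte buffer.
///
/// The `T` parameter tracks node nesting at compile time so that properties
/// can only be added while a node is open, and `V` selects the
/// [`MemoryReservationValidator`] used by [`Builder::new`].
///
/// A minimal usage sketch (buffer sizes and property values are illustrative):
///
/// ```ignore
/// let mut buf = [0u8; 0x1000];
/// let mut builder = Builder::new(BuilderConfig {
///     blob_buffer: &mut buf,
///     string_table_cap: 0x100,
///     memory_reservations: &[],
/// })?;
/// let compatible = builder.add_string("compatible")?;
/// let len = builder
///     .start_node("")?
///     .add_str(compatible, "example,board")?
///     .end_node()?
///     .build(0)?;
/// ```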
pub struct Builder<'a, T = (), V = BubbleSortValidator>
where
    V: MemoryReservationValidator,
{
    inner: Inner<'a>,
    _phantom: PhantomData<(fn(T), V)>,
}

struct Inner<'a> {
    buffer: &'a mut [u8],
    memory_reservations: &'a [spec::ReserveEntry],
    memory_reservations_off: usize,
    string_table_off: usize,
    string_table_size: usize,
    string_table_cap: usize,
    struct_table_off: usize,
    struct_table_size: usize,
}

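/// A handle to a string added to the string table with
/// [`Builder::add_string`], used as the name when adding properties.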
#[derive(Debug, Copy, Clone)]
pub struct StringId(spec::U32b);

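/// Errors returned by the builder.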
#[derive(Debug, PartialEq, Eq, Error)]
pub enum Error {
    #[error("out of space")]
    OutOfSpace,
    #[error("memory reservations overlap: {0:?} and {1:?}")]
    OverlappingMemoryReservations(spec::ReserveEntry, spec::ReserveEntry),
    #[error("duplicate memory reservation: {0:?}")]
    DuplicateMemoryReservations(spec::ReserveEntry),
    #[error("zero-sized memory reservation")]
    ZeroMemoryReservation,
}

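/// Marker type used in [`Builder`]'s type parameter to track the nesting of
/// [`Builder::start_node`] calls.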
#[derive(Debug)]
pub struct Nest<T>(PhantomData<fn(T)>);

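/// Validates the memory reservation entries passed to [`Builder::new`].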
pub trait MemoryReservationValidator {
    fn validate_memory_reservations(
        memory_reservations: &[spec::ReserveEntry],
    ) -> Result<(), Error>;
}

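/// The default [`MemoryReservationValidator`]. It compares every pair of
/// entries (O(n^2)) and rejects overlapping entries, exact duplicates, and
/// all-zero entries (the encoding reserved for the block terminator).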
pub struct BubbleSortValidator;

impl MemoryReservationValidator for BubbleSortValidator {
    fn validate_memory_reservations(
        memory_reservations: &[spec::ReserveEntry],
    ) -> Result<(), Error> {
        let entry_is_empty =
            |&spec::ReserveEntry { address, size }| u64::from(address) == 0 && u64::from(size) == 0;
        let validate_entries =
            |entry1: &spec::ReserveEntry, entry2: &spec::ReserveEntry| -> Result<(), Error> {
                let base1 = u64::from(entry1.address);
                let base2 = u64::from(entry2.address);
                let size1 = u64::from(entry1.size);
                let size2 = u64::from(entry2.size);

                if base1 == base2 && size1 == size2 {
                    return Err(Error::DuplicateMemoryReservations(*entry1));
                }

                if base1 < base2 + size2 && base2 < base1 + size1 {
                    return Err(Error::OverlappingMemoryReservations(*entry1, *entry2));
                }

                Ok(())
            };

        for (current_idx, entry1) in memory_reservations.iter().enumerate() {
            if entry_is_empty(entry1) {
                return Err(Error::ZeroMemoryReservation);
            }
            for entry2 in &memory_reservations[current_idx + 1..] {
                validate_entries(entry1, entry2)?;
            }
        }

        Ok(())
    }
}

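/// Configuration for constructing a [`Builder`].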
pub struct BuilderConfig<'a> {
    /// The buffer the device tree blob is written into.
    pub blob_buffer: &'a mut [u8],
    /// The number of bytes reserved for the string table.
    pub string_table_cap: usize,
    /// The memory reservation entries to encode in the blob.
    pub memory_reservations: &'a [spec::ReserveEntry],
}

impl<'a, V> Builder<'a, (), V>
where
    V: MemoryReservationValidator,
{
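    /// Creates a new builder that writes into `blob_buffer`, after validating
    /// `memory_reservations` with `V`.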
    pub fn new(
        BuilderConfig {
            blob_buffer: buffer,
            string_table_cap,
            memory_reservations,
        }: BuilderConfig<'a>,
    ) -> Result<Self, Error> {
        V::validate_memory_reservations(memory_reservations)?;

        let memory_reservations_size =
            (memory_reservations.len() + 1) * size_of::<spec::ReserveEntry>();
        let memory_reservations_off = size_of::<spec::Header>();
        let string_table_off = memory_reservations_off + memory_reservations_size;
        let struct_table_off = string_table_off + string_table_cap;
        Ok(Self {
            inner: Inner {
                buffer,
                memory_reservations,
                memory_reservations_off,
                string_table_size: 0,
                string_table_cap,
                string_table_off,
                struct_table_off,
                struct_table_size: 0,
            },
            _phantom: PhantomData,
        })
    }

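    /// Finishes the blob: terminates the structure block, copies the memory
    /// reservation block (plus its zero terminator), and writes the header
    /// with the given `boot_cpuid_phys`. Returns the total size of the blob
    /// in bytes.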
    pub fn build(mut self, boot_cpuid_phys: u32) -> Result<usize, Error> {
        self.inner.write_struct(&spec::END.to_be_bytes())?;

        self.inner
            .memory_reservations
            .as_bytes()
            .write_to_prefix(
                self.inner
                    .buffer
                    .get_mut(
                        self.inner.memory_reservations_off
                            ..self.inner.memory_reservations_off
                                + size_of_val(self.inner.memory_reservations),
                    )
                    .ok_or(Error::OutOfSpace)?,
            )
            .map_err(|_| Error::OutOfSpace)?;

        spec::ReserveEntry {
            address: 0.into(),
            size: 0.into(),
        }
        .write_to_prefix(
            self.inner
                .buffer
                .get_mut(
                    self.inner.string_table_off - size_of::<spec::ReserveEntry>()
                        ..self.inner.string_table_off,
                )
                .ok_or(Error::OutOfSpace)?,
        )
        .map_err(|_| Error::OutOfSpace)?;

        let total_size = self.inner.struct_table_off + self.inner.struct_table_size;
        assert!(total_size <= self.inner.buffer.len());

        let header = spec::Header {
            magic: spec::MAGIC.into(),
            totalsize: (total_size as u32).into(),
            off_dt_struct: (self.inner.struct_table_off as u32).into(),
            off_dt_strings: (self.inner.string_table_off as u32).into(),
            off_mem_rsvmap: (self.inner.memory_reservations_off as u32).into(),
            version: spec::CURRENT_VERSION.into(),
            last_comp_version: spec::COMPAT_VERSION.into(),
            boot_cpuid_phys: boot_cpuid_phys.into(),
            size_dt_struct: (self.inner.struct_table_size as u32).into(),
            size_dt_strings: (self.inner.string_table_size as u32).into(),
        };
        header
            .write_to_prefix(self.inner.buffer)
            .map_err(|_| Error::OutOfSpace)?;

        Ok(total_size)
    }
}

impl<'a, T> Builder<'a, T> {
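    /// Starts a new node named `name`, returning a builder that accepts the
    /// node's properties and child nodes.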
    pub fn start_node(mut self, name: &str) -> Result<Builder<'a, Nest<T>>, Error> {
        self.inner.write_struct(&spec::BEGIN_NODE.to_be_bytes())?;
        self.inner.write_struct(name.as_bytes())?;
        self.inner.write_struct(&[0])?;
        self.inner.align_struct()?;
        Ok(Builder {
            inner: self.inner,
            _phantom: PhantomData,
        })
    }

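    /// Adds `s` to the string table and returns its [`StringId`], for use as
    /// a property name.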
    pub fn add_string(&mut self, s: &str) -> Result<StringId, Error> {
        let len = s.len() + 1;
        if self.inner.string_table_size + len > self.inner.string_table_cap {
            return Err(Error::OutOfSpace);
        }

        let off = self.inner.string_table_off + self.inner.string_table_size;
        self.inner.buffer[off..off + s.len()].copy_from_slice(s.as_bytes());
        self.inner.buffer[off + s.len()] = 0;
        let id = StringId((self.inner.string_table_size as u32).into());
        self.inner.string_table_size += len;
        Ok(id)
    }
}

impl<'a, T> Builder<'a, Nest<T>> {
    pub fn add_null(mut self, name: StringId) -> Result<Self, Error> {
        self.inner.prop(name, &[])?;
        Ok(self)
    }

    pub fn add_u32(mut self, name: StringId, data: u32) -> Result<Self, Error> {
        self.inner.prop(name, spec::U32b::new(data).as_bytes())?;
        Ok(self)
    }

    pub fn add_u64(mut self, name: StringId, data: u64) -> Result<Self, Error> {
        self.inner.prop(name, spec::U64b::new(data).as_bytes())?;
        Ok(self)
    }

    pub fn add_u64_array(mut self, name: StringId, data: &[u64]) -> Result<Self, Error> {
        let data = data.iter().map(|val| val.to_be_bytes());
        self.inner.prop_array_iter(name, data)?;
        Ok(self)
    }

    pub fn add_u64_list(
        mut self,
        name: StringId,
        data: impl IntoIterator<Item = u64>,
    ) -> Result<Self, Error> {
        let data = data.into_iter().map(|val| val.to_be_bytes());
        self.inner.prop_array_iter(name, data)?;
        Ok(self)
    }

    pub fn add_u32_array(mut self, name: StringId, data: &[u32]) -> Result<Self, Error> {
        let data = data.iter().map(|val| val.to_be_bytes());
        self.inner.prop_array_iter(name, data)?;
        Ok(self)
    }

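    /// Adds a property whose value is the concatenation of the byte slices in
    /// `data_array`.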
    pub fn add_prop_array(mut self, name: StringId, data_array: &[&[u8]]) -> Result<Self, Error> {
        self.inner.prop_array_iter(name, data_array.iter())?;
        Ok(self)
    }

    pub fn add_str(mut self, name: StringId, data: &str) -> Result<Self, Error> {
        self.inner
            .prop_array_iter(name, [data.as_bytes(), &[0]].iter())?;
        Ok(self)
    }

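    /// Adds a string-list property: each string in `strings` is written in
    /// order, NUL terminated.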
    pub fn add_str_array(mut self, name: StringId, strings: &[&str]) -> Result<Self, Error> {
        self.inner
            .prop_array_iter(name, StringBytesWithZeroIter::new(strings))?;
        Ok(self)
    }

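    /// Closes the current node and returns to the parent builder.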
    pub fn end_node(mut self) -> Result<Builder<'a, T>, Error> {
        self.inner.write_struct(&spec::END_NODE.to_be_bytes())?;
        Ok(Builder {
            inner: self.inner,
            _phantom: PhantomData,
        })
    }
}

impl Inner<'_> {
    fn write_struct(&mut self, b: &[u8]) -> Result<usize, Error> {
        let off = self.struct_table_off + self.struct_table_size;
        self.buffer
            .get_mut(off..off + b.len())
            .ok_or(Error::OutOfSpace)?
            .copy_from_slice(b);
        self.struct_table_size += b.len();
        Ok(off)
    }

    fn align_struct(&mut self) -> Result<(), Error> {
        let new_size = (self.struct_table_size + 3) & !3;
        if self.struct_table_off + new_size <= self.buffer.len() {
            self.struct_table_size = new_size;
            Ok(())
        } else {
            Err(Error::OutOfSpace)
        }
    }

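    /// Writes a property token and header followed by the concatenation of
    /// the byte chunks produced by `data_iter`, then pads the structure block
    /// to a 4-byte boundary.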
    fn prop_array_iter(
        &mut self,
        name: StringId,
        data_iter: impl Iterator<Item = impl AsRef<[u8]>>,
    ) -> Result<(), Error> {
        self.write_struct(&spec::PROP.to_be_bytes())?;
        let header_offset = self.write_struct(spec::PropHeader::new_zeroed().as_bytes())?;
        let mut n = 0;
        for data in data_iter {
            let data = data.as_ref();
            n += data.len();
            self.write_struct(data)?;
        }
        // Backfill the property header now that the total data length is known.
        spec::PropHeader {
            len: (n as u32).into(),
            nameoff: name.0,
        }
        .write_to_prefix(&mut self.buffer[header_offset..])
        .unwrap();
        self.align_struct()?;
        Ok(())
    }

    fn prop(&mut self, name: StringId, data: &[u8]) -> Result<(), Error> {
        self.prop_array_iter(name, [data].iter())?;
        Ok(())
    }
}

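/// Iterator that yields each string's bytes followed by a NUL terminator (and
/// a single NUL for an empty slice), used to encode string-list properties.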
#[derive(Clone)]
struct StringBytesWithZeroIter<'a> {
    strings: core::slice::Iter<'a, &'a str>,
    send_zero: bool,
}

impl<'a> StringBytesWithZeroIter<'a> {
    fn new(strings: &'a [&'a str]) -> Self {
        Self {
            strings: strings.iter(),
            send_zero: strings.is_empty(),
        }
    }
}

impl<'a> Iterator for StringBytesWithZeroIter<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        if self.send_zero {
            self.send_zero = false;
            return Some(&[0]);
        }

        if let Some(s) = self.strings.next() {
            self.send_zero = true;
            return Some(s.as_bytes());
        }

        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn create_entry(address: u64, size: u64) -> spec::ReserveEntry {
        spec::ReserveEntry {
            address: address.into(),
            size: size.into(),
        }
    }

    #[test]
    fn test_overlapping_memory_reservations() {
        let entry1 = create_entry(100, 50);
        let entry2 = create_entry(120, 30);
        let reservations = [entry1, entry2];

        assert_eq!(
            BubbleSortValidator::validate_memory_reservations(&reservations),
            Err(Error::OverlappingMemoryReservations(entry1, entry2))
        );
    }

    #[test]
    fn test_duplicate_memory_reservations() {
        let entry1 = create_entry(100, 50);
        let entry2 = create_entry(100, 50);
        let reservations = [entry1, entry2];

        assert_eq!(
            BubbleSortValidator::validate_memory_reservations(&reservations),
            Err(Error::DuplicateMemoryReservations(entry1))
        );
    }

    #[test]
    fn test_zero_memory_reservation() {
        let entry = create_entry(0, 0);
        let reservations = [entry];

        assert_eq!(
            BubbleSortValidator::validate_memory_reservations(&reservations),
            Err(Error::ZeroMemoryReservation)
        );
    }

    #[test]
    fn test_valid_memory_reservations() {
        let entry1 = create_entry(100, 50);
        let entry2 = create_entry(200, 50);
        let reservations = [entry1, entry2];

        assert_eq!(
            BubbleSortValidator::validate_memory_reservations(&reservations),
            Ok(())
        );
    }

    #[test]
    fn test_valid_adjacent_memory_reservations() {
        let entry1 = create_entry(100, 50);
        let entry2 = create_entry(150, 50);
        let reservations = [entry1, entry2];

        assert_eq!(
            BubbleSortValidator::validate_memory_reservations(&reservations),
            Ok(())
        );
    }
}