1#![expect(unsafe_code)]
8#![expect(missing_docs)]
9#![expect(clippy::undocumented_unsafe_blocks, clippy::missing_safety_doc)]
10
11pub mod alloc;
12mod trycopy_windows_arm64;
13mod trycopy_windows_x64;
14pub mod unix;
15pub mod windows;
16
17pub use sys::AsMappableRef;
18pub use sys::Mappable;
19pub use sys::MappableRef;
20pub use sys::SparseMapping;
21pub use sys::alloc_shared_memory;
22pub use sys::new_mappable_from_file;
23
24use std::mem::MaybeUninit;
25use std::sync::atomic::AtomicU8;
26use thiserror::Error;
27#[cfg(unix)]
28use unix as sys;
29#[cfg(windows)]
30use windows as sys;
31use zerocopy::FromBytes;
32use zerocopy::Immutable;
33use zerocopy::IntoBytes;
34use zerocopy::KnownLayout;
35
36pub fn initialize_try_copy() {
38 #[cfg(unix)]
39 {
40 static INIT: std::sync::Once = std::sync::Once::new();
41 INIT.call_once(|| unsafe {
42 let err = install_signal_handlers();
43 if err != 0 {
44 panic!(
45 "could not install signal handlers: {}",
46 std::io::Error::from_raw_os_error(err)
47 )
48 }
49 });
50 }
51}
52
// Fault-tolerant memory primitives implemented in per-platform code (see the
// `trycopy_windows_*` modules and the Unix counterparts). By convention
// (established by the call sites and tests below): the read/write/memmove/
// memset routines return 0 on success; the cmpxchg routines return a positive
// value on a successful exchange and 0 on a comparison mismatch. On a handled
// fault, all routines fill in `failure` and return a negative value.
unsafe extern "C" {
    /// Installs the fault handlers used by the routines below.
    /// Returns 0 on success or an OS errno value on failure.
    #[cfg(unix)]
    fn install_signal_handlers() -> i32;

    /// Copies `length` bytes from `src` to `dest`, recovering from faults.
    fn try_memmove(
        dest: *mut u8,
        src: *const u8,
        length: usize,
        failure: *mut AccessFailure,
    ) -> i32;
    /// Fills `length` bytes at `dest` with byte value `c`, recovering from faults.
    fn try_memset(dest: *mut u8, c: i32, length: usize, failure: *mut AccessFailure) -> i32;
    /// Atomic 8-bit compare-exchange; on mismatch, `expected` is updated with
    /// the observed value.
    fn try_cmpxchg8(
        dest: *mut u8,
        expected: &mut u8,
        desired: u8,
        failure: *mut AccessFailure,
    ) -> i32;
    /// Atomic 16-bit compare-exchange; see [`try_cmpxchg8`].
    fn try_cmpxchg16(
        dest: *mut u16,
        expected: &mut u16,
        desired: u16,
        failure: *mut AccessFailure,
    ) -> i32;
    /// Atomic 32-bit compare-exchange; see [`try_cmpxchg8`].
    fn try_cmpxchg32(
        dest: *mut u32,
        expected: &mut u32,
        desired: u32,
        failure: *mut AccessFailure,
    ) -> i32;
    /// Atomic 64-bit compare-exchange; see [`try_cmpxchg8`].
    fn try_cmpxchg64(
        dest: *mut u64,
        expected: &mut u64,
        desired: u64,
        failure: *mut AccessFailure,
    ) -> i32;
    /// Single fixed-width reads from `src` into `dest`, recovering from faults.
    fn try_read8(dest: *mut u8, src: *const u8, failure: *mut AccessFailure) -> i32;
    fn try_read16(dest: *mut u16, src: *const u16, failure: *mut AccessFailure) -> i32;
    fn try_read32(dest: *mut u32, src: *const u32, failure: *mut AccessFailure) -> i32;
    fn try_read64(dest: *mut u64, src: *const u64, failure: *mut AccessFailure) -> i32;
    /// Single fixed-width writes of `value` to `dest`, recovering from faults.
    fn try_write8(dest: *mut u8, value: u8, failure: *mut AccessFailure) -> i32;
    fn try_write16(dest: *mut u16, value: u16, failure: *mut AccessFailure) -> i32;
    fn try_write32(dest: *mut u32, value: u32, failure: *mut AccessFailure) -> i32;
    fn try_write64(dest: *mut u64, value: u64, failure: *mut AccessFailure) -> i32;
}
97
/// Fault details written by the `try_*` primitives when an access fails.
///
/// NOTE(review): the layout is `#[repr(C)]` because the platform-specific
/// fault handlers fill this structure in directly — do not reorder fields.
#[repr(C)]
struct AccessFailure {
    // Address that faulted, or null if the handler did not record one.
    address: *mut u8,
    // Signal number that was raised (SIGSEGV or SIGBUS).
    #[cfg(unix)]
    si_signo: i32,
    // The signal's si_code, for diagnostics.
    #[cfg(unix)]
    si_code: i32,
}
106
/// An error that occurred while reading or writing memory via one of the
/// fault-tolerant access routines.
#[derive(Debug, Error)]
#[error("failed to {} memory", if self.is_write { "write" } else { "read" })]
pub struct MemoryError {
    // Byte offset into the accessed range at which the fault occurred.
    offset: usize,
    // True if the fault happened while writing, false if while reading.
    is_write: bool,
    #[source]
    source: OsAccessError,
}
115
116#[derive(Debug, Error)]
117enum OsAccessError {
118 #[cfg(windows)]
119 #[error("access violation")]
120 AccessViolation,
121 #[cfg(unix)]
122 #[error("SIGSEGV (si_code = {0:x}")]
123 Sigsegv(u32),
124 #[cfg(unix)]
125 #[error("SIGSEGV (si_code = {0:x}")]
126 Sigbus(u32),
127}
128
impl MemoryError {
    /// Builds a [`MemoryError`] from the fault record written by one of the
    /// `try_*` primitives.
    ///
    /// `len` is the length in bytes of the accessed range(s); the faulting
    /// byte offset and access direction (read vs. write) are derived from
    /// whether the faulting address falls within the source or destination
    /// range.
    ///
    /// Panics if the faulting address lies in neither range, or (on Unix) if
    /// the recorded signal is not SIGSEGV or SIGBUS.
    fn new(src: Option<*const u8>, dest: *mut u8, len: usize, failure: &AccessFailure) -> Self {
        let (offset, is_write) = if failure.address.is_null() {
            // No address was recorded; report offset 0 and infer the
            // direction from whether a source range exists (no source means
            // the operation was a pure write, e.g. memset).
            (0, src.is_none())
        } else if (dest..dest.wrapping_add(len)).contains(&failure.address) {
            // Fault inside the destination range: a failed write.
            (failure.address as usize - dest as usize, true)
        } else if let Some(src) = src {
            if (src..src.wrapping_add(len)).contains(&failure.address.cast_const()) {
                // Fault inside the source range: a failed read.
                (failure.address as usize - src as usize, false)
            } else {
                panic!(
                    "invalid failure address: {:p} src: {:p} dest: {:p} len: {:#x}",
                    failure.address, src, dest, len
                );
            }
        } else {
            panic!(
                "invalid failure address: {:p} src: None dest: {:p} len: {:#x}",
                failure.address, dest, len
            );
        };
        #[cfg(windows)]
        let source = OsAccessError::AccessViolation;
        #[cfg(unix)]
        let source = match failure.si_signo {
            libc::SIGSEGV => OsAccessError::Sigsegv(failure.si_code as u32),
            libc::SIGBUS => OsAccessError::Sigbus(failure.si_code as u32),
            _ => {
                panic!(
                    "unexpected signal: {} src: {:?} dest: {:p} len: {:#x}",
                    failure.si_signo, src, dest, len
                );
            }
        };
        Self {
            offset,
            is_write,
            source,
        }
    }

    /// The byte offset into the accessed range at which the failure occurred.
    pub fn offset(&self) -> usize {
        self.offset
    }
}
177
178pub unsafe fn try_copy<T>(src: *const T, dest: *mut T, count: usize) -> Result<(), MemoryError> {
195 let mut failure = MaybeUninit::uninit();
196 let ret = unsafe {
198 try_memmove(
199 dest.cast::<u8>(),
200 src.cast::<u8>(),
201 count * size_of::<T>(),
202 failure.as_mut_ptr(),
203 )
204 };
205 match ret {
206 0 => Ok(()),
207 _ => Err(MemoryError::new(
208 Some(src.cast()),
209 dest.cast(),
210 count,
211 unsafe { failure.assume_init_ref() },
213 )),
214 }
215}
216
217pub unsafe fn try_write_bytes<T>(dest: *mut T, val: u8, count: usize) -> Result<(), MemoryError> {
234 let mut failure = MaybeUninit::uninit();
235 let ret = unsafe {
237 try_memset(
238 dest.cast::<u8>(),
239 val.into(),
240 count * size_of::<T>(),
241 failure.as_mut_ptr(),
242 )
243 };
244 match ret {
245 0 => Ok(()),
246 _ => Err(MemoryError::new(
247 None,
248 dest.cast(),
249 count,
250 unsafe { failure.assume_init_ref() },
252 )),
253 }
254}
255
/// Atomically compares the value at `dest` with `current` and, if they are
/// equal, replaces it with `new`, recovering from page faults.
///
/// Returns `Ok(Ok(new))` if the exchange succeeded, `Ok(Err(observed))` with
/// the value actually found at `dest` if the comparison failed, or
/// `Err(MemoryError)` if the access faulted.
///
/// Panics if `size_of::<T>()` is not 1, 2, 4, or 8.
///
/// [`initialize_try_copy`] must be called before using this routine.
///
/// # Safety
///
/// The caller must ensure that `dest` points to an allocation (possibly not
/// mapped) of at least `size_of::<T>()` bytes.
pub unsafe fn try_compare_exchange<T: IntoBytes + FromBytes + Immutable + KnownLayout>(
    dest: *mut T,
    mut current: T,
    new: T,
) -> Result<Result<T, T>, MemoryError> {
    let mut failure = MaybeUninit::uninit();
    // The transmutes below are guarded by the match on size_of::<T>(), and
    // T: IntoBytes + FromBytes ensures every bit pattern of T is valid, so
    // reinterpreting T as the same-sized integer is sound.
    let ret = unsafe {
        match size_of::<T>() {
            1 => try_cmpxchg8(
                dest.cast(),
                std::mem::transmute::<&mut T, &mut u8>(&mut current),
                std::mem::transmute_copy::<T, u8>(&new),
                failure.as_mut_ptr(),
            ),
            2 => try_cmpxchg16(
                dest.cast(),
                std::mem::transmute::<&mut T, &mut u16>(&mut current),
                std::mem::transmute_copy::<T, u16>(&new),
                failure.as_mut_ptr(),
            ),
            4 => try_cmpxchg32(
                dest.cast(),
                std::mem::transmute::<&mut T, &mut u32>(&mut current),
                std::mem::transmute_copy::<T, u32>(&new),
                failure.as_mut_ptr(),
            ),
            8 => try_cmpxchg64(
                dest.cast(),
                std::mem::transmute::<&mut T, &mut u64>(&mut current),
                std::mem::transmute_copy::<T, u64>(&new),
                failure.as_mut_ptr(),
            ),
            _ => panic!("unsupported size"),
        }
    };
    match ret {
        // Positive return: the exchange took place.
        n if n > 0 => Ok(Ok(new)),
        // Zero: comparison failed; `current` now holds the observed value.
        0 => Ok(Err(current)),
        // Negative: the access faulted.
        _ => Err(MemoryError::new(
            None,
            dest.cast(),
            size_of::<T>(),
            unsafe { failure.assume_init_ref() },
        )),
    }
}
323
/// Atomically compares the bytes at `dest` with `*current` and, if they are
/// equal, replaces them with the bytes of `*new`, recovering from page
/// faults.
///
/// Returns `Ok(true)` if the exchange succeeded, or `Ok(false)` if the
/// comparison failed — in which case `current` is updated in place with the
/// value observed at `dest`. Returns `Err(MemoryError)` if the access
/// faulted.
///
/// Panics if the sizes of `current` and `new` are unequal or not 1, 2, 4,
/// or 8 bytes.
///
/// [`initialize_try_copy`] must be called before using this routine.
///
/// # Safety
///
/// The caller must ensure that `dest` points to an allocation (possibly not
/// mapped) of at least `size_of_val(current)` bytes.
pub unsafe fn try_compare_exchange_ref<
    T: IntoBytes + FromBytes + Immutable + KnownLayout + ?Sized,
>(
    dest: *mut u8,
    current: &mut T,
    new: &T,
) -> Result<bool, MemoryError> {
    let mut failure = MaybeUninit::uninit();
    // Dispatch on the (matching) byte sizes; the casts reinterpret the byte
    // views of `current`/`new` as the same-sized integers, which is valid
    // because T: IntoBytes + FromBytes allows any bit pattern.
    let ret = unsafe {
        match (size_of_val(current), size_of_val(new)) {
            (1, 1) => try_cmpxchg8(
                dest,
                &mut *current.as_mut_bytes().as_mut_ptr(),
                new.as_bytes()[0],
                failure.as_mut_ptr(),
            ),
            (2, 2) => try_cmpxchg16(
                dest.cast(),
                &mut *current.as_mut_bytes().as_mut_ptr().cast(),
                u16::from_ne_bytes(new.as_bytes().try_into().unwrap()),
                failure.as_mut_ptr(),
            ),
            (4, 4) => try_cmpxchg32(
                dest.cast(),
                &mut *current.as_mut_bytes().as_mut_ptr().cast(),
                u32::from_ne_bytes(new.as_bytes().try_into().unwrap()),
                failure.as_mut_ptr(),
            ),
            (8, 8) => try_cmpxchg64(
                dest.cast(),
                &mut *current.as_mut_bytes().as_mut_ptr().cast(),
                u64::from_ne_bytes(new.as_bytes().try_into().unwrap()),
                failure.as_mut_ptr(),
            ),
            _ => panic!("unsupported or mismatched size"),
        }
    };
    // Negative return means the access faulted.
    if ret < 0 {
        return Err(MemoryError::new(
            None,
            dest.cast(),
            size_of_val(current),
            unsafe { failure.assume_init_ref() },
        ));
    }
    // Positive means the exchange succeeded; zero means comparison mismatch.
    Ok(ret > 0)
}
393
/// Reads a value of type `T` from `src` with a single fixed-width access,
/// recovering from page faults.
///
/// Returns the value read, or a [`MemoryError`] if the access faulted.
///
/// Panics if `size_of::<T>()` is not 1, 2, 4, or 8.
///
/// [`initialize_try_copy`] must be called before using this routine.
///
/// # Safety
///
/// The caller must ensure that `src` points to an allocation (possibly not
/// mapped) of at least `size_of::<T>()` bytes.
pub unsafe fn try_read_volatile<T: FromBytes + Immutable + KnownLayout>(
    src: *const T,
) -> Result<T, MemoryError> {
    let mut dest = MaybeUninit::<T>::uninit();
    let mut failure = MaybeUninit::uninit();
    // Dispatch on the access width; T: FromBytes guarantees any bit pattern
    // read is a valid T.
    let ret = unsafe {
        match size_of::<T>() {
            1 => try_read8(dest.as_mut_ptr().cast(), src.cast(), failure.as_mut_ptr()),
            2 => try_read16(dest.as_mut_ptr().cast(), src.cast(), failure.as_mut_ptr()),
            4 => try_read32(dest.as_mut_ptr().cast(), src.cast(), failure.as_mut_ptr()),
            8 => try_read64(dest.as_mut_ptr().cast(), src.cast(), failure.as_mut_ptr()),
            _ => panic!("unsupported size"),
        }
    };
    match ret {
        0 => {
            // Success: the primitive fully initialized `dest`.
            Ok(unsafe { dest.assume_init() })
        }
        _ => Err(MemoryError::new(
            Some(src.cast()),
            dest.as_mut_ptr().cast(),
            size_of::<T>(),
            unsafe { failure.assume_init_ref() },
        )),
    }
}
440
/// Writes `*value` to `dest` with a single fixed-width access, recovering
/// from page faults.
///
/// Returns `Ok(())` on success, or a [`MemoryError`] if the access faulted.
///
/// Panics if `size_of::<T>()` is not 1, 2, 4, or 8.
///
/// [`initialize_try_copy`] must be called before using this routine.
///
/// # Safety
///
/// The caller must ensure that `dest` points to an allocation (possibly not
/// mapped) of at least `size_of::<T>()` bytes.
pub unsafe fn try_write_volatile<T: IntoBytes + Immutable + KnownLayout>(
    dest: *mut T,
    value: &T,
) -> Result<(), MemoryError> {
    let mut failure = MaybeUninit::uninit();
    // Dispatch on the access width; the transmute_copy calls are guarded by
    // the size_of match, and T: IntoBytes makes the byte reinterpretation
    // valid.
    let ret = unsafe {
        match size_of::<T>() {
            1 => try_write8(
                dest.cast(),
                std::mem::transmute_copy(value),
                failure.as_mut_ptr(),
            ),
            2 => try_write16(
                dest.cast(),
                std::mem::transmute_copy(value),
                failure.as_mut_ptr(),
            ),
            4 => try_write32(
                dest.cast(),
                std::mem::transmute_copy(value),
                failure.as_mut_ptr(),
            ),
            8 => try_write64(
                dest.cast(),
                std::mem::transmute_copy(value),
                failure.as_mut_ptr(),
            ),
            _ => panic!("unsupported size"),
        }
    };
    match ret {
        0 => Ok(()),
        _ => Err(MemoryError::new(
            None,
            dest.cast(),
            size_of::<T>(),
            unsafe { failure.assume_init_ref() },
        )),
    }
}
500
/// An error from an access to a [`SparseMapping`].
#[derive(Debug, Error)]
pub enum SparseMappingError {
    /// The requested range does not fit within the mapping.
    #[error("out of bounds")]
    OutOfBounds,
    /// The access faulted (e.g. on an unmapped page within the range).
    #[error(transparent)]
    Memory(MemoryError),
}
508
impl SparseMapping {
    /// Returns the system page size in bytes.
    pub fn page_size() -> usize {
        sys::page_size()
    }

    /// Writes `data` into the mapping at byte `offset`, tolerating faults on
    /// unmapped pages (reported as [`SparseMappingError::Memory`]).
    ///
    /// Returns [`SparseMappingError::OutOfBounds`] if the range does not fit
    /// within the mapping.
    ///
    /// Panics if this is not a local mapping.
    pub fn write_at(&self, offset: usize, data: &[u8]) -> Result<(), SparseMappingError> {
        assert!(self.is_local(), "cannot write to remote mappings");

        // Overflow-safe check for offset + data.len() <= self.len().
        if self.len() < offset || self.len() - offset < data.len() {
            return Err(SparseMappingError::OutOfBounds);
        }
        // SAFETY: the range was validated to lie within the mapping above.
        unsafe {
            let dest = self.as_ptr().cast::<u8>().add(offset);
            try_copy(data.as_ptr(), dest, data.len()).map_err(SparseMappingError::Memory)
        }
    }

    /// Reads from the mapping at byte `offset` into `data`, tolerating
    /// faults on unmapped pages (reported as [`SparseMappingError::Memory`]).
    ///
    /// Returns [`SparseMappingError::OutOfBounds`] if the range does not fit
    /// within the mapping.
    ///
    /// Panics if this is not a local mapping.
    pub fn read_at(&self, offset: usize, data: &mut [u8]) -> Result<(), SparseMappingError> {
        assert!(self.is_local(), "cannot read from remote mappings");

        // Overflow-safe check for offset + data.len() <= self.len().
        if self.len() < offset || self.len() - offset < data.len() {
            return Err(SparseMappingError::OutOfBounds);
        }
        // SAFETY: the range was validated to lie within the mapping above.
        unsafe {
            let src = (self.as_ptr() as *const u8).add(offset);
            try_copy(src, data.as_mut_ptr(), data.len()).map_err(SparseMappingError::Memory)
        }
    }

    /// Reads a plain value of type `T` (`size_of::<T>()` bytes) from the
    /// mapping at byte `offset`, via [`Self::read_at`].
    pub fn read_plain<T: FromBytes + Immutable + KnownLayout>(
        &self,
        offset: usize,
    ) -> Result<T, SparseMappingError> {
        let mut obj = MaybeUninit::<T>::uninit();
        // SAFETY: the slice covers exactly the storage of `obj`.
        unsafe {
            self.read_at(
                offset,
                std::slice::from_raw_parts_mut(obj.as_mut_ptr().cast::<u8>(), size_of::<T>()),
            )?;
        }
        // SAFETY: read_at fully initialized the bytes of `obj`, and
        // T: FromBytes accepts any bit pattern.
        Ok(unsafe { obj.assume_init() })
    }

    /// Fills `len` bytes of the mapping at byte `offset` with `val`,
    /// tolerating faults on unmapped pages.
    ///
    /// Returns [`SparseMappingError::OutOfBounds`] if the range does not fit
    /// within the mapping.
    ///
    /// Panics if this is not a local mapping.
    pub fn fill_at(&self, offset: usize, val: u8, len: usize) -> Result<(), SparseMappingError> {
        assert!(self.is_local(), "cannot fill remote mappings");

        // Overflow-safe check for offset + len <= self.len().
        if self.len() < offset || self.len() - offset < len {
            return Err(SparseMappingError::OutOfBounds);
        }
        // SAFETY: the range was validated to lie within the mapping above.
        unsafe {
            let dest = self.as_ptr().cast::<u8>().add(offset);
            try_write_bytes(dest, val, len).map_err(SparseMappingError::Memory)
        }
    }

    /// Returns the mapped memory at `start..start + len` as a slice of
    /// atomic bytes.
    ///
    /// Panics if the range does not fit within the mapping. Note that
    /// accesses through the returned slice will fault (not return an error)
    /// if they hit an unmapped page.
    pub fn atomic_slice(&self, start: usize, len: usize) -> &[AtomicU8] {
        assert!(self.len() >= start && self.len() - start >= len);
        // SAFETY: the range was validated above; AtomicU8 has the same
        // layout as u8.
        unsafe { std::slice::from_raw_parts((self.as_ptr() as *const AtomicU8).add(start), len) }
    }
}
591
#[cfg(test)]
mod tests {
    use super::*;

    /// Which fault-tolerant primitive a test run exercises.
    #[derive(Copy, Clone, Debug)]
    enum Primitive {
        Read,
        Write,
        CompareAndSwap,
    }

    /// Access width; the discriminant is the width in bits.
    #[repr(u32)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    enum Size {
        Bit8 = 8,
        Bit16 = 16,
        Bit32 = 32,
        Bit64 = 64,
    }

    /// Exercises one primitive at one width: first against valid memory
    /// (must succeed and leave the fault record untouched), then against a
    /// known-bad address (must return -1 and record the faulting address).
    fn test_unsafe_primitive(primitive: Primitive, size: Size) {
        let mut dest = !0u64;
        let dest_addr = std::ptr::from_mut(&mut dest).cast::<()>();
        let src = 0x5555_5555_5555_5555u64;
        let src_addr = std::ptr::from_ref(&src).cast::<()>();
        // A low address that is assumed to be unmapped, to provoke a fault.
        let bad_addr_mut = 0x100 as *mut (); let bad_addr = bad_addr_mut.cast_const();
        // Sentinel stored in the fault record to detect whether it was set.
        let nonsense_addr = !0u64 as *mut ();
        // Sub-64-bit accesses only replace the low `size` bits of `dest`;
        // the remaining high bits keep their original all-ones value.
        let expected = if size != Size::Bit64 {
            dest.wrapping_shl(size as u32) | src.wrapping_shr(64 - (size as u32))
        } else {
            src
        };
        let mut af = AccessFailure {
            address: nonsense_addr.cast(),
            #[cfg(unix)]
            si_signo: 0,
            #[cfg(unix)]
            si_code: 0,
        };
        let af_addr = &mut af as *mut _;

        // Pass 1: valid addresses — every primitive should succeed.
        let res = unsafe {
            match size {
                Size::Bit8 => match primitive {
                    Primitive::Read => try_read8(dest_addr.cast(), src_addr.cast(), af_addr),
                    Primitive::Write => try_write8(dest_addr.cast(), src as u8, af_addr),
                    Primitive::CompareAndSwap => {
                        // cmpxchg returns 1 on success; normalize to 0.
                        1 - try_cmpxchg8(dest_addr.cast(), &mut (dest as u8), src as u8, af_addr)
                    }
                },
                Size::Bit16 => match primitive {
                    Primitive::Read => try_read16(dest_addr.cast(), src_addr.cast(), af_addr),
                    Primitive::Write => try_write16(dest_addr.cast(), src as u16, af_addr),
                    Primitive::CompareAndSwap => {
                        1 - try_cmpxchg16(dest_addr.cast(), &mut (dest as u16), src as u16, af_addr)
                    }
                },
                Size::Bit32 => match primitive {
                    Primitive::Read => try_read32(dest_addr.cast(), src_addr.cast(), af_addr),
                    Primitive::Write => try_write32(dest_addr.cast(), src as u32, af_addr),
                    Primitive::CompareAndSwap => {
                        1 - try_cmpxchg32(dest_addr.cast(), &mut (dest as u32), src as u32, af_addr)
                    }
                },
                Size::Bit64 => match primitive {
                    Primitive::Read => try_read64(dest_addr.cast(), src_addr.cast(), af_addr),
                    Primitive::Write => try_write64(dest_addr.cast(), src, af_addr),
                    Primitive::CompareAndSwap => {
                        1 - try_cmpxchg64(dest_addr.cast(), &mut { dest }, src, af_addr)
                    }
                },
            }
        };
        assert_eq!(
            dest, expected,
            "Expected value must match the result for {primitive:?} and {size:?}"
        );
        assert_eq!(
            res, 0,
            "Success should be returned for {primitive:?} and {size:?}"
        );
        assert_eq!(
            af.address,
            nonsense_addr.cast(),
            "Fault address must not be set for {primitive:?} and {size:?}"
        );

        // Pass 2: bad address — every primitive should fault cleanly.
        let res = unsafe {
            match size {
                Size::Bit8 => match primitive {
                    Primitive::Read => try_read8(dest_addr.cast(), bad_addr.cast(), af_addr),
                    Primitive::Write => try_write8(bad_addr_mut.cast(), src as u8, af_addr),
                    Primitive::CompareAndSwap => {
                        try_cmpxchg8(bad_addr_mut.cast(), &mut (dest as u8), src as u8, af_addr)
                    }
                },
                Size::Bit16 => match primitive {
                    Primitive::Read => try_read16(dest_addr.cast(), bad_addr.cast(), af_addr),
                    Primitive::Write => try_write16(bad_addr_mut.cast(), src as u16, af_addr),
                    Primitive::CompareAndSwap => {
                        try_cmpxchg16(bad_addr_mut.cast(), &mut (dest as u16), src as u16, af_addr)
                    }
                },
                Size::Bit32 => match primitive {
                    Primitive::Read => try_read32(dest_addr.cast(), bad_addr.cast(), af_addr),
                    Primitive::Write => try_write32(bad_addr_mut.cast(), src as u32, af_addr),
                    Primitive::CompareAndSwap => {
                        try_cmpxchg32(bad_addr_mut.cast(), &mut (dest as u32), src as u32, af_addr)
                    }
                },
                Size::Bit64 => match primitive {
                    Primitive::Read => try_read64(dest_addr.cast(), bad_addr.cast(), af_addr),
                    Primitive::Write => try_write64(bad_addr_mut.cast(), src, af_addr),
                    Primitive::CompareAndSwap => {
                        try_cmpxchg64(bad_addr_mut.cast(), &mut { dest }, src, af_addr)
                    }
                },
            }
        };
        assert_eq!(
            dest, expected,
            "Fault preserved source and destination for {primitive:?} and {size:?}"
        );
        assert_eq!(
            res, -1,
            "Error code must be returned for {primitive:?} and {size:?}"
        );
        assert_eq!(
            af.address,
            bad_addr_mut.cast(),
            "Fault address must be set for {primitive:?} and {size:?}"
        );
    }

    #[test]
    fn test_unsafe_primitives() {
        initialize_try_copy();

        for primitive in [Primitive::Read, Primitive::Write, Primitive::CompareAndSwap] {
            for size in [Size::Bit8, Size::Bit16, Size::Bit32, Size::Bit64] {
                test_unsafe_primitive(primitive, size);
            }
        }
    }

    // Recognizable fill pattern used by the mapping tests.
    static BUF: [u8; 65536] = [0xcc; 65536];

    /// Allocates, writes, and unmaps a page near each end of a sparse
    /// mapping of `range_size` bytes.
    fn test_with(range_size: usize) {
        let page_size = SparseMapping::page_size();

        let mapping = SparseMapping::new(range_size).unwrap();
        mapping.alloc(page_size, page_size).unwrap();
        // SAFETY: the page at `page_size` was just allocated.
        let slice = unsafe {
            std::slice::from_raw_parts_mut(mapping.as_ptr().add(page_size).cast::<u8>(), page_size)
        };
        slice.copy_from_slice(&BUF[..page_size]);
        mapping.unmap(page_size, page_size).unwrap();

        mapping.alloc(range_size - page_size, page_size).unwrap();
        // SAFETY: the last page of the range was just allocated.
        let slice = unsafe {
            std::slice::from_raw_parts_mut(
                mapping.as_ptr().add(range_size - page_size).cast::<u8>(),
                page_size,
            )
        };
        slice.copy_from_slice(&BUF[..page_size]);
        mapping.unmap(range_size - page_size, page_size).unwrap();
        drop(mapping);
    }

    #[test]
    fn test_sparse_mapping() {
        // Exercise a spread of range sizes, including ones straddling large
        // alignment boundaries.
        test_with(0x100000);
        test_with(0x200000);
        test_with(0x200000 + SparseMapping::page_size());
        test_with(0x40000000);
        test_with(0x40000000 + SparseMapping::page_size());
    }

    #[test]
    fn test_try_copy() {
        initialize_try_copy();

        let mapping = SparseMapping::new(2 * 1024 * 1024).unwrap();
        let page_size = SparseMapping::page_size();
        mapping.alloc(page_size, page_size).unwrap();
        let base = mapping.as_ptr().cast::<u8>();
        // SAFETY: pointers stay within the reserved mapping; faults on
        // unmapped pages are exactly what is being tested.
        unsafe {
            // Page 0 is unmapped: the copy must fail.
            try_copy(BUF.as_ptr(), base, 100).unwrap_err();
            // Page 1 is mapped: the copy must succeed.
            try_copy(BUF.as_ptr(), base.add(page_size), 100).unwrap();
            // Copy runs one byte past the mapped page: must fail.
            try_copy(BUF.as_ptr(), base.add(page_size), page_size + 1).unwrap_err();
        }
    }

    #[test]
    fn test_cmpxchg() {
        initialize_try_copy();

        let page_size = SparseMapping::page_size();
        let mapping = SparseMapping::new(page_size * 2).unwrap();
        mapping.alloc(0, page_size).unwrap();
        let base = mapping.as_ptr().cast::<u8>();
        // SAFETY: accesses stay within the reserved mapping; the final call
        // intentionally targets the unmapped second page.
        unsafe {
            // Fresh page is zeroed, so 0 -> 1 succeeds.
            assert_eq!(try_compare_exchange(base.add(8), 0, 1).unwrap().unwrap(), 1);
            // Value is now 1, so expecting 0 fails and reports 1.
            assert_eq!(
                try_compare_exchange(base.add(8), 0, 2)
                    .unwrap()
                    .unwrap_err(),
                1
            );
            // Same location via a u64 view: 1 -> 2 succeeds.
            assert_eq!(
                try_compare_exchange(base.cast::<u64>().add(1), 1, 2)
                    .unwrap()
                    .unwrap(),
                2
            );
            assert!(try_compare_exchange_ref(base.add(8), &mut [2u8, 0], &[3, 0]).unwrap());
            // Unmapped page: must report a memory error.
            try_compare_exchange(base.add(page_size), 0, 2).unwrap_err();
        }
    }

    #[test]
    fn test_overlapping_mappings() {
        #![expect(clippy::identity_op)]

        // Repeatedly allocate/unmap overlapping regions to verify the
        // mapping bookkeeping handles every overlap combination.
        let page_size = SparseMapping::page_size();
        let mapping = SparseMapping::new(0x10 * page_size).unwrap();
        mapping.alloc(0x1 * page_size, 0x4 * page_size).unwrap();
        mapping.alloc(0x1 * page_size, 0x2 * page_size).unwrap();
        mapping.alloc(0x2 * page_size, 0x3 * page_size).unwrap();
        mapping.alloc(0, 0x10 * page_size).unwrap();
        mapping.alloc(0x8 * page_size, 0x8 * page_size).unwrap();
        mapping.unmap(0xc * page_size, 0x2 * page_size).unwrap();
        mapping.alloc(0x9 * page_size, 0x4 * page_size).unwrap();
        mapping.unmap(0x3 * page_size, 0xb * page_size).unwrap();

        mapping.alloc(0x5 * page_size, 0x4 * page_size).unwrap();
        mapping.alloc(0x6 * page_size, 0x2 * page_size).unwrap();
        mapping.alloc(0x6 * page_size, 0x1 * page_size).unwrap();
        mapping.alloc(0x4 * page_size, 0x3 * page_size).unwrap();

        // Repeat the overlap patterns with file-backed mappings.
        let shmem = alloc_shared_memory(0x4 * page_size).unwrap();
        mapping
            .map_file(0x5 * page_size, 0x4 * page_size, &shmem, 0, true)
            .unwrap();
        mapping
            .map_file(0x6 * page_size, 0x2 * page_size, &shmem, 0, true)
            .unwrap();
        mapping
            .map_file(0x6 * page_size, 0x1 * page_size, &shmem, 0, true)
            .unwrap();
        mapping
            .map_file(0x4 * page_size, 0x3 * page_size, &shmem, 0, true)
            .unwrap();

        drop(mapping);
    }
}