Fixed memory (un)mapping

replace/247b2c6614ab81925e96744398632e3b7bb8b7ad
Erin 2023-07-26 12:22:28 +02:00
parent 19df4538d7
commit 1ed153a9a2
2 changed files with 39 additions and 22 deletions

View File

@@ -10,26 +10,42 @@ use {
 fuzz_target!(|data: &[u8]| {
     if let Ok(mut vm) = Vm::<_, 16384>::new_validated(data, TestTrapHandler, Default::default()) {
-        let page = Box::into_raw(Box::<Page>::default());
-        unsafe {
-            vm.memory
-                .map(
-                    page.cast(),
-                    0,
-                    hbvm::mem::paging::Permission::Write,
-                    PageSize::Size4K,
-                )
-                .unwrap()
-        };
+        // Alloc and map some memory
+        let pages = [
+            alloc_and_map(&mut vm.memory, 0),
+            alloc_and_map(&mut vm.memory, 4096),
+        ];
+
+        // Run VM
         let _ = vm.run();
-        vm.memory.unmap(0).unwrap();
-        let _ = unsafe { Box::from_raw(page) };
+
+        // Unmap and dealloc the memory
+        for (i, page) in pages.into_iter().enumerate() {
+            unmap_and_dealloc(&mut vm.memory, page, i as u64 * 4096);
+        }
     }
 });
 
+fn alloc_and_map(memory: &mut Memory, at: u64) -> *mut u8 {
+    let ptr = Box::into_raw(Box::<Page>::default()).cast();
+    unsafe {
+        memory
+            .map(
+                ptr,
+                at,
+                hbvm::mem::paging::Permission::Write,
+                PageSize::Size4K,
+            )
+            .unwrap()
+    };
+    ptr
+}
+
+fn unmap_and_dealloc(memory: &mut Memory, ptr: *mut u8, from: u64) {
+    memory.unmap(from).unwrap();
+    let _ = unsafe { Box::from_raw(ptr.cast::<Page>()) };
+}
+
 #[repr(align(4096))]
 struct Page([u8; 4096]);
 
 impl Default for Page {
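Note on the harness change above: the map/unmap logic itself is unchanged, it is only factored into the alloc_and_map / unmap_and_dealloc helpers, and a second page is now mapped at address 4096. As a standalone sketch of the underlying allocation pattern (plain Rust, no hbvm types; Page mirrors the struct defined in the harness):

    #[repr(align(4096))]
    struct Page([u8; 4096]);

    fn main() {
        // Box::into_raw leaks the allocation into a raw, 4 KiB-aligned pointer
        // that can then be handed to a pager / mapped into guest address space.
        let ptr: *mut Page = Box::into_raw(Box::new(Page([0; 4096])));
        assert_eq!(ptr as usize % 4096, 0); // guaranteed by #[repr(align(4096))]

        // After unmapping, Box::from_raw reconstructs the Box exactly once,
        // so the page is deallocated rather than leaked.
        drop(unsafe { Box::from_raw(ptr) });
    }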

View File

@@ -1,7 +1,7 @@
 //! Program memory implementation
 
-pub mod paging;
 pub mod bmc;
+pub mod paging;
 
 mod pfhandler;
@@ -58,13 +58,13 @@ impl Memory {
         // Decide on what level depth are we going
         let lookup_depth = match pagesize {
-            PageSize::Size4K => 4,
-            PageSize::Size2M => 3,
+            PageSize::Size4K => 0,
+            PageSize::Size2M => 1,
             PageSize::Size1G => 2,
         };
 
         // Walk pagetable levels
-        for lvl in (0..lookup_depth).rev() {
+        for lvl in (lookup_depth..5).rev() {
             let entry = (*current_pt)
                 .table
                 .get_unchecked_mut(addr_extract_index(target, lvl));
@@ -94,7 +94,7 @@ impl Memory {
         let node = (*current_pt)
             .table
-            .get_unchecked_mut(addr_extract_index(target, 4 - lookup_depth));
+            .get_unchecked_mut(addr_extract_index(target, lookup_depth));
 
         // Check if node is not mapped
         if node.permission() != Permission::Empty {
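Together with the previous hunk this changes what lookup_depth means: it is no longer a count of levels to walk down, but the level at which the final entry is written (0 for 4 KiB, 1 for 2 MiB, 2 for 1 GiB). The walk therefore starts at the root level, and the leaf index is extracted at lookup_depth directly rather than at 4 - lookup_depth. A small sketch of the new ranges (plain Rust, not hbvm code; treating level 4 as the root is an assumption based on the five-entry page_tables array used by unmap below):

    fn main() {
        // For each page size, list the levels the rewritten loop visits and
        // the level at which the final entry index is extracted.
        for (name, lookup_depth) in [("Size4K", 0u64), ("Size2M", 1), ("Size1G", 2)] {
            let walked: Vec<u64> = (lookup_depth..5).rev().collect();
            println!("{name}: walks levels {walked:?}, leaf entry at level {lookup_depth}");
        }
    }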
@@ -114,6 +114,7 @@ impl Memory {
     /// just should be ignored.
     #[cfg(feature = "alloc")]
     pub fn unmap(&mut self, addr: u64) -> Result<(), NothingToUnmap> {
+        extern crate std;
         let mut current_pt = self.root_pt;
         let mut page_tables = [core::ptr::null_mut(); 5];
@@ -152,9 +153,7 @@ impl Memory {
             unsafe {
                 let children = &mut (*(*entry).ptr()).pt.childen;
-
-                // Decrease children count
-                *children -= 1;
+                *children -= 1; // Decrease children count
 
                 // If there are no children, deallocate.
                 if *children == 0 {
@@ -162,6 +161,8 @@ impl Memory {
                     // Zero visited entry
                     core::ptr::write_bytes(entry, 0, 1);
+                } else {
+                    break;
                 }
             }
         }
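The new else branch stops the upward cleanup as soon as a page table still holds live entries: the loop previously had no early exit here, so every ancestor's child count was decremented even when nothing was deallocated, leaving those counts too low. A minimal sketch of the fixed pattern (plain Rust, not hbvm code; the counts are invented):

    fn main() {
        // Child counts from the leaf's parent table up toward the root, before an unmap.
        let mut child_counts = [1u32, 1, 3, 2, 5];

        for count in child_counts.iter_mut() {
            *count -= 1;
            if *count == 0 {
                // Table is now empty: it would be deallocated and its parent's entry
                // zeroed, so the parent's own count is decremented next iteration.
            } else {
                // The table still holds live mappings, so its ancestors keep
                // referencing it; stop decrementing here.
                break;
            }
        }

        assert_eq!(child_counts, [0, 0, 2, 2, 5]);
    }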