Mapping + bye bye memory leaks

Erin 2023-07-20 20:47:50 +02:00
parent ac7fdc7688
commit 6fe1fd91bf
3 changed files with 134 additions and 92 deletions

View file

@@ -15,9 +15,29 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
} else {
unsafe {
let mut vm = Vm::<_, 0>::new_unchecked(&prog, TestTrapHandler);
vm.memory.insert_test_page();
let data = {
let ptr = std::alloc::alloc_zeroed(std::alloc::Layout::from_size_align_unchecked(
4096, 4096,
));
if ptr.is_null() {
panic!("Alloc error tbhl");
}
ptr
};
vm.memory
.map(
data,
0,
hbvm::vm::mem::paging::Permission::Write,
PageSize::Size4K,
)
.unwrap();
println!("Program interrupt: {:?}", vm.run());
println!("{:?}", vm.registers);
vm.memory.unmap(0).unwrap();
std::alloc::dealloc(data, std::alloc::Layout::from_size_align_unchecked(4096, 4096));
}
}
Ok(())
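The alloc/dealloc pairing above is what plugs the leak, but it has to be repeated on every exit path. A minimal RAII sketch (not part of this commit; `HostPage` is a hypothetical helper) that frees the host page automatically when it leaves scope:

    use std::alloc::{alloc_zeroed, dealloc, Layout};

    /// Hypothetical guard owning one zeroed, 4 KiB aligned host page.
    struct HostPage {
        ptr: *mut u8,
        layout: Layout,
    }

    impl HostPage {
        fn new() -> Self {
            let layout = Layout::from_size_align(4096, 4096).unwrap();
            // SAFETY: the layout has a non-zero size
            let ptr = unsafe { alloc_zeroed(layout) };
            assert!(!ptr.is_null(), "host page allocation failed");
            Self { ptr, layout }
        }

        fn as_mut_ptr(&self) -> *mut u8 {
            self.ptr
        }
    }

    impl Drop for HostPage {
        fn drop(&mut self) {
            // SAFETY: `ptr` was allocated with exactly this layout
            unsafe { dealloc(self.ptr, self.layout) };
        }
    }

With such a guard the dealloc call disappears; the only remaining manual step is the unmap, which must still happen before the page is freed.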

View file

@@ -56,11 +56,11 @@ impl Memory {
for _ in 0..4 {
let mut pt = Box::<PageTable>::default();
pt[0] = entry;
pt.table[0] = entry;
entry = PtEntry::new(Box::into_raw(pt) as _, Permission::Node);
}
(*self.root_pt)[0] = entry;
(*self.root_pt).table[0] = entry;
}
}
@@ -70,17 +70,95 @@ impl Memory {
/// Who knows.
pub unsafe fn map(
&mut self,
mut host: *mut u8,
target: usize,
host: *mut u8,
target: u64,
perm: Permission,
pagesize: PageSize,
count: usize,
) {
todo!()
) -> Result<(), MapError> {
let mut current_pt = self.root_pt;
let lookup_depth = match pagesize {
PageSize::Size4K => 4,
PageSize::Size2M => 3,
PageSize::Size1G => 2,
};
// Walk the node levels above the leaf (the root is level 4,
// the leaf sits at level 4 - lookup_depth)
for lvl in (5 - lookup_depth..5).rev() {
let entry = (*current_pt)
.table
.get_unchecked_mut(addr_extract_index(target, lvl));
let ptr = entry.ptr();
match entry.permission() {
Permission::Empty => {
(*current_pt).children += 1;
let table = Box::into_raw(Box::new(paging::PtPointedData {
pt: PageTable::default(),
}));
core::ptr::write(entry, PtEntry::new(table, Permission::Node));
current_pt = table as _;
}
Permission::Node => current_pt = ptr as _,
_ => return Err(MapError::AlreadyMapped),
}
}
// Write the leaf entry, refusing to overwrite an existing mapping
let entry = (*current_pt)
.table
.get_unchecked_mut(addr_extract_index(target, 4 - lookup_depth));
if entry.permission() != Permission::Empty {
return Err(MapError::AlreadyMapped);
}
(*current_pt).children += 1;
core::ptr::write(entry, PtEntry::new(host.cast(), perm));
Ok(())
}
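To make the walk concrete: each level consumes 8 bits of the target address above the 12-bit page offset, so a 4 KiB mapping descends through node levels 4, 3, 2 and 1 and writes its leaf at level 0. A standalone sketch of that index arithmetic (mirroring the addr_extract_index helper shown further down; the 8-bit-per-level split is an assumption tied to the 256-entry tables):

    /// Mirror of the per-level index split used by the page-table walk.
    fn extract_index(addr: u64, lvl: u8) -> usize {
        ((addr >> (lvl * 8 + 12)) & 0xFF) as usize
    }

    fn main() {
        // A 4 KiB mapping at this target address...
        let target = 0x0000_0001_2345_6000_u64;
        // ...walks node entries at levels 4, 3, 2, 1...
        for lvl in (1..5).rev() {
            println!("level {lvl}: node index {}", extract_index(target, lvl));
        }
        // ...and writes its leaf entry at level 0
        println!("level 0: leaf index {}", extract_index(target, 0));
    }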
/// Unmaps a page from VM's memory
pub fn unmap(&mut self, addr: usize, count: usize) {
todo!()
pub fn unmap(&mut self, addr: u64) -> Result<(), NothingToUnmap> {
let mut current_pt = self.root_pt;
let mut page_tables = [core::ptr::null_mut(); 5];
for lvl in (0..5).rev() {
let entry = unsafe {
(*current_pt)
.table
.get_unchecked_mut(addr_extract_index(addr, lvl))
};
let ptr = entry.ptr();
match entry.permission() {
Permission::Empty => return Err(NothingToUnmap),
Permission::Node => {
page_tables[lvl as usize] = entry;
current_pt = ptr as _
}
// Leaf → zero it out (a zeroed entry is a valid empty one)
// and stop descending
_ => unsafe {
core::ptr::write(entry, Default::default());
break;
},
}
}
// Walk the recorded node entries bottom-up, freeing tables
// that just lost their last child
for entry in page_tables.into_iter() {
if entry.is_null() {
continue;
}
unsafe {
let children = &mut (*(*entry).ptr()).pt.children;
*children -= 1;
if *children == 0 {
// Table is empty now: free it and clear the entry pointing to it
core::mem::drop(Box::from_raw((*entry).ptr() as *mut PageTable));
core::ptr::write(entry, Default::default());
} else {
// Still has children, so every table above it does too; stop here
break;
}
}
}
Ok(())
}
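The children counters are what make the "bye bye memory leaks" part work: unmap only frees a node table once its last entry is gone. A sketch of that invariant using the API from this diff (vm setup elided, so this is illustrative rather than compilable as-is):

    unsafe {
        // Two 4 KiB pages whose translations share every node table
        vm.memory.map(page_a, 0x0000, Permission::Write, PageSize::Size4K).unwrap();
        vm.memory.map(page_b, 0x1000, Permission::Write, PageSize::Size4K).unwrap();

        // First unmap: the shared bottom table still has one child,
        // so the cleanup loop breaks and frees nothing
        vm.memory.unmap(0x0000).unwrap();

        // Second unmap: the child count hits zero at every level,
        // so the whole now-empty chain of node tables is deallocated
        vm.memory.unmap(0x1000).unwrap();
    }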
/// Load value from an address
@@ -338,10 +416,9 @@ impl Iterator for AddrPageLookuper {
for lvl in (0..5).rev() {
// Get an entry
unsafe {
let entry = (*current_pt).get_unchecked(
usize::try_from((self.addr >> (lvl * 9 + 12)) & ((1 << 9) - 1))
.expect("?conradluget a better CPU"),
);
let entry = (*current_pt)
.table
.get_unchecked(addr_extract_index(self.addr, lvl));
let ptr = entry.ptr();
match entry.permission() {
@@ -356,7 +433,7 @@ impl Iterator for AddrPageLookuper {
// Node → proceed walking
Permission::Node => current_pt = ptr as _,
// Leaft → return relevant data
// Leaf → return relevant data
perm => {
break 'a (
// Pointer in host memory
@@ -386,6 +463,11 @@ impl Iterator for AddrPageLookuper {
}
}
fn addr_extract_index(addr: u64, lvl: u8) -> usize {
debug_assert!(lvl <= 4);
// 8 bits of index per level: `PageTable::table` holds 256 entries,
// so a 9-bit index would run past the end of the table
usize::try_from((addr >> (lvl * 8 + 12)) & ((1 << 8) - 1)).expect("?conradluget a better CPU")
}
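A quick sanity check of the helper (hypothetical test; it assumes the 8-bit-per-level split of the 256-entry tables):

    #[test]
    fn addr_extract_index_splits_address() {
        // The 12 page-offset bits never reach the index
        assert_eq!(addr_extract_index(0xFFF, 0), 0);
        // Level 0 takes bits 12..20
        assert_eq!(addr_extract_index(0xAB << 12, 0), 0xAB);
        // Level 1 takes bits 20..28
        assert_eq!(addr_extract_index(0xCD << 20, 1), 0xCD);
    }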
/// Page size
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PageSize {
@@ -401,7 +483,7 @@ pub enum PageSize {
impl PageSize {
/// Convert page table level to size of page
fn from_lvl(lvl: u8) -> Option<Self> {
const fn from_lvl(lvl: u8) -> Option<Self> {
match lvl {
0 => Some(PageSize::Size4K),
1 => Some(PageSize::Size2M),
@@ -419,6 +501,9 @@ pub struct LoadError(u64);
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub struct StoreError(u64);
#[derive(Clone, Copy, Display, Debug)]
pub struct NothingToUnmap;
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub enum MemoryAccessReason {
Load,
@@ -451,3 +536,8 @@ impl From<StoreError> for VmRunError {
Self::StoreAccessEx(value.0)
}
}
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub enum MapError {
AlreadyMapped,
}
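Caller-side, the new error type makes the failure mode explicit. A sketch of handling it instead of the `.unwrap()` used in main.rs (same setup assumptions as there):

    match unsafe { vm.memory.map(data, 0, Permission::Write, PageSize::Size4K) } {
        Ok(()) => { /* the page is visible to the guest from now on */ }
        Err(MapError::AlreadyMapped) => {
            // The target slot already holds a leaf (or a node on the
            // walked path); unmap it first or pick another guest address
            eprintln!("guest address 0 is already mapped");
        }
    }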

View file

@@ -1,14 +1,6 @@
//! Page table and associated structures implementation
use {
core::{
fmt::Debug,
mem::MaybeUninit,
ops::{Index, IndexMut},
slice::SliceIndex,
},
delegate::delegate,
};
use core::{fmt::Debug, mem::MaybeUninit};
/// Page entry permission
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
@@ -66,78 +58,18 @@ impl Debug for PtEntry {
/// Page table
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(align(4096))]
pub struct PageTable([PtEntry; 512]);
impl PageTable {
delegate!(to self.0 {
/// Returns a reference to an element or subslice depending on the type of
/// index.
///
/// - If given a position, returns a reference to the element at that
/// position or `None` if out of bounds.
/// - If given a range, returns the subslice corresponding to that range,
/// or `None` if out of bounds.
///
pub fn get<I>(&self, ix: I) -> Option<&I::Output>
where I: SliceIndex<[PtEntry]>;
/// Returns a mutable reference to an element or subslice depending on the
/// type of index (see [`get`]) or `None` if the index is out of bounds.
pub fn get_mut<I>(&mut self, ix: I) -> Option<&mut I::Output>
where I: SliceIndex<[PtEntry]>;
/// Returns a reference to an element or subslice, without doing bounds
/// checking.
///
/// For a safe alternative see [`get`].
///
/// # Safety
///
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where I: SliceIndex<[PtEntry]>;
/// Returns a mutable reference to an element or subslice, without doing
/// bounds checking.
///
/// For a safe alternative see [`get_mut`].
///
/// # Safety
///
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where I: SliceIndex<[PtEntry]>;
});
}
impl<Idx> Index<Idx> for PageTable
where
Idx: SliceIndex<[PtEntry]>,
{
type Output = Idx::Output;
#[inline(always)]
fn index(&self, index: Idx) -> &Self::Output {
&self.0[index]
}
}
impl<Idx> IndexMut<Idx> for PageTable
where
Idx: SliceIndex<[PtEntry]>,
{
#[inline(always)]
fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
&mut self.0[index]
}
pub struct PageTable {
/// Number of occupied (non-empty) entries in `table`
pub children: u8,
pub table: [PtEntry; 256],
}
impl Default for PageTable {
fn default() -> Self {
// SAFETY: It's fine, zeroed page table entry is valid (= empty)
Self(unsafe { MaybeUninit::zeroed().assume_init() })
Self {
children: 0,
table: unsafe { MaybeUninit::zeroed().assume_init() },
}
}
}
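Since the zeroed-init trick relies on layout details, compile-time checks can pin them down. A sketch (it assumes PtEntry is a single 64-bit word, which this diff does not show):

    // Hypothetical layout checks for the new PageTable definition:
    // a 1-byte counter plus 256 × 8-byte entries, padded to the 4 KiB alignment.
    const _: () = {
        assert!(core::mem::align_of::<PageTable>() == 4096);
        assert!(core::mem::size_of::<PageTable>() == 4096);
    };

A zeroed PtEntry is an empty entry, so `MaybeUninit::zeroed().assume_init()` still yields a valid, fully empty table, exactly as the SAFETY comment above claims.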