holey-bytes/hbvm/src/mem/softpaging/icache.rs

//! Program instruction cache

use {
    super::{lookup::AddrPageLookuper, paging::PageTable, PageSize},
    crate::mem::Address,
    core::{
        mem::{size_of, MaybeUninit},
        ptr::{copy_nonoverlapping, NonNull},
    },
};

/// Instruction cache
#[derive(Clone, Debug)]
pub struct ICache {
    /// Current page address base
    base: Address,
    /// Current page pointer
    data: Option<NonNull<u8>>,
    /// Current page size
    size: PageSize,
    /// Address mask
    mask: u64,
}

impl Default for ICache {
    fn default() -> Self {
        Self {
            base: Address::NULL,
            data: Default::default(),
            size: PageSize::Size4K,
            mask: Default::default(),
        }
    }
}

impl ICache {
    /// Fetch instruction from cache
    ///
    /// # Safety
    /// `T` should be valid to read from instruction memory
    pub(super) unsafe fn fetch<T>(
        &mut self,
        addr: Address,
        root_pt: *const PageTable,
    ) -> Option<T> {
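        // A worked example of the hit path, with illustrative numbers not
        // taken from a real page table: assuming a 4 KiB page, the mask set
        // by `fetch_page` is !0xFFF, so for addr = 0x1234 we get
        // base = 0x1000 and offset = 0x234, and the read is served from
        // `data + offset` as long as `base` matches the cached page base.
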
        let mut ret = MaybeUninit::<T>::uninit();

        let mut pbase = self
            .data
            .or_else(|| unsafe { self.fetch_page(self.base + self.size, root_pt) })?;

        // Get address base
        let base = addr.map(|x| x & self.mask);

        // Base not matching, fetch anew and switch to the new page's pointer
        if base != self.base {
            pbase = unsafe { self.fetch_page(base, root_pt) }?;
        };

        let offset = addr.get() & !self.mask;
        let requ_size = size_of::<T>();

        // Page overflow
        let rem = (offset as usize)
            .saturating_add(requ_size)
            .saturating_sub(self.size as _);
        let first_copy = requ_size.saturating_sub(rem);
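
        // For example, with a 4 KiB page (illustrative numbers, not derived
        // from a real page table): an 8-byte read at offset 0xFFC gives
        // rem = 4 and first_copy = 4, so four bytes come from this page and
        // four from the next; a read that fits in the page gives rem = 0.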

        // Copy non-overflowing part
        unsafe {
            copy_nonoverlapping(
                pbase.as_ptr().add(offset as usize),
                ret.as_mut_ptr().cast::<u8>(),
                first_copy,
            )
        };

        // Copy overflow
        if rem != 0 {
            let pbase = unsafe { self.fetch_page(self.base + self.size, root_pt) }?;

            // Unlikely, unsupported scenario
            if rem > self.size as _ {
                return None;
            }

            unsafe {
                copy_nonoverlapping(
                    pbase.as_ptr(),
                    ret.as_mut_ptr().cast::<u8>().add(first_copy),
                    rem,
                )
            };
        }

        Some(unsafe { ret.assume_init() })
    }

    /// Fetch a page
    unsafe fn fetch_page(&mut self, addr: Address, pt: *const PageTable) -> Option<NonNull<u8>> {
        let res = AddrPageLookuper::new(addr, 0, pt).next()?.ok()?;

        if !super::perm_check::executable(res.perm) {
            return None;
        }

        // Mask keeps only the page-base bits, i.e. clears the page offset
        (self.size, self.mask) = match res.size {
            4096 => (PageSize::Size4K, !((1 << 12) - 1)),
            2097152 => (PageSize::Size2M, !((1 << 21) - 1)),
            1073741824 => (PageSize::Size1G, !((1 << 30) - 1)),
            _ => return None,
        };
        self.data = Some(NonNull::new(res.ptr)?);
        self.base = addr.map(|x| x & self.mask);

        self.data
    }
}
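
// A minimal, illustrative check of the page-split arithmetic used in
// `ICache::fetch` above. It is a sketch under assumed values (a 4 KiB page)
// and does not exercise the page tables or the cache itself.
#[cfg(test)]
mod split_arith_tests {
    #[test]
    fn page_crossing_split() {
        let page_size = 4096_usize;

        // Mirror of the `rem` / `first_copy` computation in `ICache::fetch`
        let split = |offset: usize, requ_size: usize| {
            let rem = offset.saturating_add(requ_size).saturating_sub(page_size);
            (requ_size.saturating_sub(rem), rem)
        };

        // Fully inside the page: everything is copied in the first pass
        assert_eq!(split(0x234, 8), (8, 0));
        // Ends exactly at the page boundary: still no overflow
        assert_eq!(split(0xFF8, 8), (8, 0));
        // Crosses into the next page: four bytes from each page
        assert_eq!(split(0xFFC, 8), (4, 4));
    }
}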