//! Program instruction cache
use {
    super::{lookup::AddrPageLookuper, paging::PageTable, PageSize},
    crate::mem::Address,
    core::{
        mem::{size_of, MaybeUninit},
        ptr::{copy_nonoverlapping, NonNull},
    },
};
/// Instruction cache
///
/// Caches the most recently translated executable page so that repeated
/// instruction fetches from the same page can skip the page-table walk.
#[derive(Clone, Debug)]
pub struct ICache {
    /// Current page address base
    base: Address,
    /// Current page pointer (`None` when the cache is cold)
    data: Option<NonNull<u8>>,
    /// Current page size
    size: PageSize,
    /// Address mask selecting the page-base bits of an address
    /// (`addr & mask` = page base, `addr & !mask` = in-page offset)
    mask: u64,
}
impl Default for ICache {
    /// An empty, cold cache: no page resident, 4 KiB page size,
    /// and an all-zero address mask.
    fn default() -> Self {
        Self {
            base: Address::NULL,
            data: None,
            size: PageSize::Size4K,
            mask: 0,
        }
    }
}
impl ICache {
/// Fetch instruction from cache
///
/// # Safety
/// `T` should be valid to read from instruction memory
pub(super) unsafe fn fetch<T>(
&mut self,
addr: Address,
root_pt: *const PageTable,
) -> Option<T> {
2023-08-17 18:28:02 -05:00
let mut ret = MaybeUninit::<T>::uninit();
2024-07-08 00:22:53 -05:00
let pbase =
self.data.or_else(|| unsafe { self.fetch_page(self.base + self.size, root_pt) })?;
2023-08-17 18:28:02 -05:00
// Get address base
let base = addr.map(|x| x & self.mask);
2023-08-17 18:28:02 -05:00
// Base not matching, fetch anew
if base != self.base {
2023-11-15 12:03:56 -06:00
unsafe { self.fetch_page(base, root_pt) }?;
2023-08-17 18:28:02 -05:00
};
let offset = addr.get() & !self.mask;
2023-08-17 18:28:02 -05:00
let requ_size = size_of::<T>();
// Page overflow
2024-07-08 00:22:53 -05:00
let rem = (offset as usize).saturating_add(requ_size).saturating_sub(self.size as _);
2023-08-17 18:28:02 -05:00
let first_copy = requ_size.saturating_sub(rem);
// Copy non-overflowing part
2023-11-15 12:03:56 -06:00
unsafe { copy_nonoverlapping(pbase.as_ptr(), ret.as_mut_ptr().cast::<u8>(), first_copy) };
2023-08-17 18:28:02 -05:00
// Copy overflow
if rem != 0 {
2023-11-15 12:03:56 -06:00
let pbase = unsafe { self.fetch_page(self.base + self.size, root_pt) }?;
2023-08-17 18:28:02 -05:00
// Unlikely, unsupported scenario
if rem > self.size as _ {
return None;
}
2023-11-15 12:03:56 -06:00
unsafe {
copy_nonoverlapping(
pbase.as_ptr(),
ret.as_mut_ptr().cast::<u8>().add(first_copy),
rem,
)
};
2023-08-17 18:28:02 -05:00
}
2023-11-15 12:03:56 -06:00
Some(unsafe { ret.assume_init() })
2023-08-17 18:28:02 -05:00
}
/// Fetch a page
unsafe fn fetch_page(&mut self, addr: Address, pt: *const PageTable) -> Option<NonNull<u8>> {
2023-08-17 18:28:02 -05:00
let res = AddrPageLookuper::new(addr, 0, pt).next()?.ok()?;
if !super::perm_check::executable(res.perm) {
return None;
}
(self.size, self.mask) = match res.size {
4096 => (PageSize::Size4K, !((1 << 8) - 1)),
2097152 => (PageSize::Size2M, !((1 << (8 * 2)) - 1)),
1073741824 => (PageSize::Size1G, !((1 << (8 * 3)) - 1)),
_ => return None,
};
self.data = Some(NonNull::new(res.ptr)?);
self.base = addr.map(|x| x & self.mask);
2023-08-17 18:28:02 -05:00
self.data
}
}