forked from AbleOS/holey-bytes
BMC is now interruptible
parent 6588837769
commit bdda987da9
@@ -9,7 +9,7 @@ use {
 };
 
 fuzz_target!(|data: &[u8]| {
-    if let Ok(mut vm) = Vm::<_, 0>::new_validated(data, TestTrapHandler, Default::default()) {
+    if let Ok(mut vm) = Vm::<_, 100>::new_validated(data, TestTrapHandler, Default::default()) {
         let _ = vm.run();
     }
 });
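The fuzz-harness hunk above changes only the timer quotient, bumped from 0 to 100. Presumably a quotient of 0 disables the VM's cycle timer, so a fuzz input issuing one huge BMC used to monopolize a single `run()` call; with the block copier now interruptible and a nonzero quotient, such copies are spread across many cycles and the timer can bound each slice.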
@@ -19,13 +19,12 @@ pub mod mem;
 pub mod value;
 
 use {
-    self::{mem::HandlePageFault, value::ValueVariant},
-    core::{cmp::Ordering, ops},
+    core::{cmp::Ordering, mem::size_of, ops},
     hbbytecode::{
         valider, OpParam, ParamBB, ParamBBB, ParamBBBB, ParamBBD, ParamBBDH, ParamBBW, ParamBD,
     },
-    mem::Memory,
-    value::Value,
+    mem::{bmc::BlockCopier, HandlePageFault, Memory},
+    value::{Value, ValueVariant},
 };
 
 /// HoleyBytes Virtual Machine
@@ -53,6 +52,9 @@ pub struct Vm<'a, PfHandler, const TIMER_QUOTIENT: usize> {
 
     /// Program timer
     timer: usize,
+
+    /// Saved block copier
+    copier: Option<BlockCopier>,
 }
 
 impl<'a, PfHandler: HandlePageFault, const TIMER_QUOTIENT: usize>
@@ -71,6 +73,7 @@ impl<'a, PfHandler: HandlePageFault, const TIMER_QUOTIENT: usize>
             program_len: program.len() - 12,
             program,
             timer: 0,
+            copier: None,
         }
     }
 
@@ -255,13 +258,41 @@ impl<'a, PfHandler: HandlePageFault, const TIMER_QUOTIENT: usize>
                 }
                 BMC => {
                     // Block memory copy
-                    let ParamBBD(src, dst, count) = self.decode();
-                    self.memory.block_copy(
-                        self.read_reg(src).cast::<u64>(),
-                        self.read_reg(dst).cast::<u64>(),
-                        count as _,
-                        &mut self.pfhandler,
-                    )?;
+                    match if let Some(copier) = &mut self.copier {
+                        // There is some copier, poll.
+                        copier.poll(&mut self.memory, &mut self.pfhandler)
+                    } else {
+                        // There is none, make one!
+                        let ParamBBD(src, dst, count) = self.decode();
+
+                        // So we are still on BMC on the next cycle
+                        self.pc -= size_of::<ParamBBD>() + 1;
+
+                        self.copier = Some(BlockCopier::new(
+                            self.read_reg(src).cast(),
+                            self.read_reg(dst).cast(),
+                            count as _,
+                        ));
+
+                        self.copier
+                            .as_mut()
+                            .unwrap_unchecked() // SAFETY: We just assigned it there
+                            .poll(&mut self.memory, &mut self.pfhandler)
+                    } {
+                        // We are done, shift the program counter
+                        core::task::Poll::Ready(Ok(())) => {
+                            self.copier = None;
+                            self.pc += size_of::<ParamBBD>() + 1;
+                        }
+                        // Error: shift the program counter (for consistency)
+                        // and yield the error
+                        core::task::Poll::Ready(Err(e)) => {
+                            self.pc += size_of::<ParamBBD>() + 1;
+                            return Err(e.into());
+                        }
+                        // Not done yet, proceed to the next cycle
+                        core::task::Poll::Pending => (),
+                    }
                 }
                 BRC => {
                     // Block register copy
@@ -353,7 +384,7 @@ impl<'a, PfHandler: HandlePageFault, const TIMER_QUOTIENT: usize>
     #[inline]
     unsafe fn decode<T: OpParam>(&mut self) -> T {
         let data = self.program.as_ptr().add(self.pc + 1).cast::<T>().read();
-        self.pc += 1 + core::mem::size_of::<T>();
+        self.pc += 1 + size_of::<T>();
         data
     }
 
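The new BMC handler above is the heart of this commit: the VM parks a `BlockCopier` in `self.copier`, rewinds `pc` so the same instruction is dispatched again on the next cycle, and polls the copier once per cycle until it reports `Poll::Ready`. Below is a minimal, self-contained sketch of that save-and-poll pattern using only `core::task::Poll`; the `Copier` and `drive` names, the chunk size, and the unit error type are illustrative assumptions, not hbvm API.

    use core::task::Poll;

    /// Toy resumable copier: does at most one fixed-size chunk of work per poll.
    struct Copier {
        remaining: usize,
    }

    impl Copier {
        const CHUNK: usize = 4096; // illustrative chunk size

        fn new(total: usize) -> Self {
            Self { remaining: total }
        }

        /// Copy one chunk; report whether more work is left.
        fn poll(&mut self) -> Poll<Result<(), ()>> {
            let step = self.remaining.min(Self::CHUNK);
            // ... the real code would move `step` bytes here ...
            self.remaining -= step;
            if self.remaining == 0 {
                Poll::Ready(Ok(()))
            } else {
                Poll::Pending
            }
        }
    }

    /// Driver loop mirroring the interpreter: keep the copier across "cycles"
    /// and only drop it (and move past the instruction) once it is Ready.
    fn drive(total: usize) {
        let mut saved: Option<Copier> = None;
        loop {
            let copier = saved.get_or_insert_with(|| Copier::new(total));
            match copier.poll() {
                Poll::Ready(Ok(())) => break, // done, advance the PC
                Poll::Ready(Err(())) => break, // error, advance the PC and report it
                Poll::Pending => {}            // still copying, run another cycle
            }
        }
    }

    fn main() {
        drive(10_000);
    }

The point of this shape is that each `poll` does a bounded amount of work, so the surrounding interpreter loop (and its timer) stays responsive even for very large copies.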
hbvm/src/mem/bmc.rs (new normal file, 152 lines)
@@ -0,0 +1,152 @@
+use {
+    super::MemoryAccessReason,
+    crate::{
+        mem::{perm_check, HandlePageFault, Memory},
+        VmRunError,
+    },
+    core::{mem::MaybeUninit, task::Poll},
+};
+
+// Buffer size (defaults to 4 KiB, the smallest page size on most platforms)
+const BUF_SIZE: usize = 4096;
+
+// This should be equal to `BUF_SIZE`
+#[repr(align(4096))]
+struct AlignedBuf([MaybeUninit<u8>; BUF_SIZE]);
+
+pub struct BlockCopier {
+    /// Source address
+    src: u64,
+    /// Destination address
+    dst: u64,
+    /// How many buffer sizes to copy?
+    n_buffers: usize,
+    /// …and what remains after?
+    rem: usize,
+}
+
+impl BlockCopier {
+    pub fn new(src: u64, dst: u64, count: usize) -> Self {
+        Self {
+            src,
+            dst,
+            n_buffers: count / BUF_SIZE,
+            rem: count % BUF_SIZE,
+        }
+    }
+
+    /// Copy one block
+    ///
+    /// # Safety
+    /// - Same as for [`Memory::load`] and [`Memory::store`]
+    pub unsafe fn poll(
+        &mut self,
+        memory: &mut Memory,
+        traph: &mut impl HandlePageFault,
+    ) -> Poll<Result<(), BlkCopyError>> {
+        // Safety: Assuming uninit of array of MaybeUninit is sound
+        let mut buf = AlignedBuf(MaybeUninit::uninit().assume_init());
+
+        if self.n_buffers != 0 {
+            if let Err(e) = act(
+                memory,
+                self.src,
+                self.dst,
+                buf.0.as_mut_ptr().cast(),
+                BUF_SIZE,
+                traph,
+            ) {
+                return Poll::Ready(Err(e));
+            }
+
+            self.src += BUF_SIZE as u64;
+            self.dst += BUF_SIZE as u64;
+            self.n_buffers -= 1;
+
+            return if self.n_buffers + self.rem == 0 {
+                // If there is nothing left, we are done
+                Poll::Ready(Ok(()))
+            } else {
+                // Otherwise, advise the caller to run it again
+                Poll::Pending
+            };
+        }
+
+        if self.rem != 0 {
+            if let Err(e) = act(
+                memory,
+                self.src,
+                self.dst,
+                buf.0.as_mut_ptr().cast(),
+                self.rem,
+                traph,
+            ) {
+                return Poll::Ready(Err(e));
+            }
+        }
+
+        Poll::Ready(Ok(()))
+    }
+}
+
+#[inline]
+unsafe fn act(
+    memory: &mut Memory,
+    src: u64,
+    dst: u64,
+    buf: *mut u8,
+    count: usize,
+    traph: &mut impl HandlePageFault,
+) -> Result<(), BlkCopyError> {
+    // Load to buffer
+    memory
+        .memory_access(
+            MemoryAccessReason::Load,
+            src,
+            buf,
+            count,
+            perm_check::readable,
+            |src, dst, count| core::ptr::copy(src, dst, count),
+            traph,
+        )
+        .map_err(|addr| BlkCopyError {
+            access_reason: MemoryAccessReason::Load,
+            addr,
+        })?;
+
+    // Store from buffer
+    memory
+        .memory_access(
+            MemoryAccessReason::Store,
+            dst,
+            buf,
+            count,
+            perm_check::writable,
+            |dst, src, count| core::ptr::copy(src, dst, count),
+            traph,
+        )
+        .map_err(|addr| BlkCopyError {
+            access_reason: MemoryAccessReason::Store,
+            addr,
+        })?;
+
+    Ok(())
+}
+
+/// Error occurred when copying a block of memory
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct BlkCopyError {
+    /// Kind of access
+    access_reason: MemoryAccessReason,
+    /// VM Address
+    addr: u64,
+}
+
+impl From<BlkCopyError> for VmRunError {
+    fn from(value: BlkCopyError) -> Self {
+        match value.access_reason {
+            MemoryAccessReason::Load => Self::LoadAccessEx(value.addr),
+            MemoryAccessReason::Store => Self::StoreAccessEx(value.addr),
+        }
+    }
+}
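For orientation, `BlockCopier::new` above simply splits the requested byte count into whole `BUF_SIZE` (4 KiB) chunks plus a remainder, and each `poll` moves at most one of those pieces. A quick standalone sketch of that arithmetic (the `split` helper is illustrative, not part of the crate):

    const BUF_SIZE: usize = 4096; // mirrors bmc::BUF_SIZE above

    fn split(count: usize) -> (usize, usize) {
        (count / BUF_SIZE, count % BUF_SIZE)
    }

    fn main() {
        // 10_000 bytes => two full 4 KiB buffers, then a 1808-byte tail,
        // i.e. three polls before the copier reports Ready.
        assert_eq!(split(10_000), (2, 1808));
        // Anything no larger than one buffer completes in a single poll.
        assert_eq!(split(4096), (1, 0));
        assert_eq!(split(300), (0, 300));
    }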
@@ -1,6 +1,7 @@
 //! Program memory implementation
 
 pub mod paging;
+pub mod bmc;
 
 mod pfhandler;
 
@@ -8,7 +9,6 @@ pub use pfhandler::HandlePageFault;
 
 use {
     super::VmRunError,
-    core::mem::MaybeUninit,
     derive_more::Display,
     paging::{PageTable, Permission},
 };
@@ -215,93 +215,6 @@ impl Memory {
             .map_err(StoreError)
     }
 
-    /// Copy a block of memory
-    ///
-    /// # Safety
-    /// - Same as for [`Self::load`] and [`Self::store`]
-    /// - This function has been rewritten and is now pretty much boring
-    pub unsafe fn block_copy(
-        &mut self,
-        mut src: u64,
-        mut dst: u64,
-        count: usize,
-        traph: &mut impl HandlePageFault,
-    ) -> Result<(), BlkCopyError> {
-        // Yea, i know it is possible to do this more efficiently, but I am too lazy.
-
-        impl Memory {
-            #[inline]
-            unsafe fn act(
-                &mut self,
-                src: u64,
-                dst: u64,
-                buf: *mut u8,
-                count: usize,
-                traph: &mut impl HandlePageFault,
-            ) -> Result<(), BlkCopyError> {
-                // Load to buffer
-                self.memory_access(
-                    MemoryAccessReason::Load,
-                    src,
-                    buf,
-                    count,
-                    perm_check::readable,
-                    |src, dst, count| core::ptr::copy(src, dst, count),
-                    traph,
-                )
-                .map_err(|addr| BlkCopyError {
-                    access_reason: MemoryAccessReason::Load,
-                    addr,
-                })?;
-
-                // Store from buffer
-                self.memory_access(
-                    MemoryAccessReason::Store,
-                    dst,
-                    buf,
-                    count,
-                    perm_check::writable,
-                    |dst, src, count| core::ptr::copy(src, dst, count),
-                    traph,
-                )
-                .map_err(|addr| BlkCopyError {
-                    access_reason: MemoryAccessReason::Store,
-                    addr,
-                })?;
-
-                Ok(())
-            }
-        }
-
-        // Buffer size (defaults to 4 KiB, a smallest page size on most platforms)
-        const BUF_SIZE: usize = 4096;
-
-        // This should be equal to `BUF_SIZE`
-        #[repr(align(4096))]
-        struct AlignedBuf([MaybeUninit<u8>; BUF_SIZE]);
-
-        // Safety: Assuming uninit of array of MaybeUninit is sound
-        let mut buf = AlignedBuf(MaybeUninit::uninit().assume_init());
-
-        // Calculate how many times we need to copy buffer-sized blocks if any and the rest.
-        let n_buffers = count / BUF_SIZE;
-        let rem = count % BUF_SIZE;
-
-        // Copy buffer-sized blocks
-        for _ in 0..n_buffers {
-            self.act(src, dst, buf.0.as_mut_ptr().cast(), BUF_SIZE, traph)?;
-            src += BUF_SIZE as u64;
-            dst += BUF_SIZE as u64;
-        }
-
-        // Copy the rest (if any)
-        if rem != 0 {
-            self.act(src, dst, buf.0.as_mut_ptr().cast(), rem, traph)?;
-        }
-
-        Ok(())
-    }
-
     // Everyone behold, the holy function, the god of HBVM memory accesses!
 
     /// Split address to pages, check their permissions and feed pointers with offset
@@ -534,24 +447,6 @@ pub enum MemoryAccessReason {
     Store,
 }
 
-/// Error occured when copying a block of memory
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct BlkCopyError {
-    /// Kind of access
-    access_reason: MemoryAccessReason,
-    /// VM Address
-    addr: u64,
-}
-
-impl From<BlkCopyError> for VmRunError {
-    fn from(value: BlkCopyError) -> Self {
-        match value.access_reason {
-            MemoryAccessReason::Load => Self::LoadAccessEx(value.addr),
-            MemoryAccessReason::Store => Self::StoreAccessEx(value.addr),
-        }
-    }
-}
-
 impl From<LoadError> for VmRunError {
     fn from(value: LoadError) -> Self {
         Self::LoadAccessEx(value.0)
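Note that the two removals above are a relocation rather than a deletion: the synchronous `block_copy`, its `BUF_SIZE`/`AlignedBuf` helpers, the load-then-store `act` helper, and `BlkCopyError` with its `VmRunError` conversion all reappear, reorganized around the resumable `BlockCopier`, in the new `hbvm/src/mem/bmc.rs` shown earlier.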
@@ -1,4 +1,4 @@
 hex_literal_case = "Upper"
 imports_granularity = "One"
-struct_field_align_threshold = 5
-enum_discrim_align_threshold = 5
+struct_field_align_threshold = 8
+enum_discrim_align_threshold = 8