Block memory copy

Author: Erin, 2023-06-25 00:07:53 +02:00 (committed by ondra05)
parent 6356b7dd24
commit 498e729c90
2 changed files with 119 additions and 64 deletions
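
Editor's note: in short, this commit replaces the stubbed-out Memory::block_copy with an implementation that stages the transfer through a scratch buffer: the source range is read into the buffer through the page-table-aware memory_access path, then written out to the destination, with the buffer living on the stack for small counts and on the heap otherwise. The following standalone Rust sketch is illustrative only and is not part of the commit; copy_via_buffer and the plain-slice arguments are made up for this note, and only the 512-byte stack threshold mirrors STACK_BUFFER_SIZE in the diff. It shows the same stack-or-heap staging idea without the VM's paging and trap handling:

// Sketch only -- not the commit's code. The real block_copy walks the VM page
// table via memory_access and reports faults to a trap handler; plain slices
// stand in for guest memory here, and copy_via_buffer is a hypothetical name.
const STACK_BUFFER_SIZE: usize = 512;

fn copy_via_buffer(src: &[u8], dst: &mut [u8]) {
    let count = src.len().min(dst.len());

    // Small transfers reuse a fixed stack array; larger ones fall back to the heap.
    let mut stack_buf = [0u8; STACK_BUFFER_SIZE];
    let mut heap_buf = Vec::new();
    let buf: &mut [u8] = if count <= STACK_BUFFER_SIZE {
        &mut stack_buf[..count]
    } else {
        heap_buf.resize(count, 0);
        &mut heap_buf[..count]
    };

    // Phase 1: "load" the source range into the scratch buffer.
    buf.copy_from_slice(&src[..count]);
    // Phase 2: "store" the scratch buffer into the destination range.
    dst[..count].copy_from_slice(buf);
}

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];
    copy_via_buffer(&src, &mut dst);
    assert_eq!(dst, [1, 2, 3, 4]);
}

Staging through an intermediate buffer also keeps overlapping source and destination ranges well-defined, which is presumably part of why the commit copies through a buffer rather than page-by-page as the removed commented-out version attempted.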


@@ -1,7 +1,9 @@
 mod paging;
+use core::mem::MaybeUninit;
 use self::paging::{PageTable, Permission, PtEntry};
-use super::trap::HandleTrap;
+use super::{trap::HandleTrap, VmRunError};
 use alloc::boxed::Box;
 use derive_more::Display;
@@ -105,37 +107,80 @@ impl Memory {
     }
 
     /// Copy a block of memory
-    pub unsafe fn block_copy(&mut self, src: u64, dst: u64, count: u64) -> Result<(), ()> {
-        /* let count = usize::try_from(count).expect("?conradluget a better CPU");
-        let mut srcs = PageSplitter::new(src, count, self.root_pt);
-        let mut dsts = PageSplitter::new(dst, count, self.root_pt);
-        let mut c_src = srcs.next().ok_or(())?;
-        let mut c_dst = dsts.next().ok_or(())?;
-        loop {
-            let min_size = c_src.size.min(c_dst.size);
-            unsafe {
-                core::ptr::copy(c_src.ptr, c_dst.ptr, min_size);
-            }
-            match (
-                match c_src.size.saturating_sub(min_size) {
-                    0 => srcs.next(),
-                    size => Some(PageSplitResult { size, ..c_src }),
-                },
-                match c_dst.size.saturating_sub(min_size) {
-                    0 => dsts.next(),
-                    size => Some(PageSplitResult { size, ..c_dst }),
-                },
-            ) {
-                (None, None) => return Ok(()),
-                (Some(src), Some(dst)) => (c_src, c_dst) = (src, dst),
-                _ => return Err(()),
-            }
-        } */
-        // TODO
-        Err(())
+    ///
+    /// # Safety
+    /// - Same as for [`Self::load`] and [`Self::store`]
+    /// - Your faith in the gods of UB
+    /// - Addr-san claims it's fine but who knows is she isn't lying :ferrisSus:
+    pub unsafe fn block_copy(
+        &mut self,
+        src: u64,
+        dst: u64,
+        count: usize,
+        traph: &mut impl HandleTrap,
+    ) -> Result<(), BlkCopyError> {
+        // Yea, i know it is possible to do this more efficiently, but I am too lazy.
+        const STACK_BUFFER_SIZE: usize = 512;
+
+        // Decide if to use stack-allocated buffer or to heap allocate
+        // Deallocation is again decided on size at the end of the function
+        let mut buf = MaybeUninit::<[u8; STACK_BUFFER_SIZE]>::uninit();
+        let buf = if count <= STACK_BUFFER_SIZE {
+            buf.as_mut_ptr().cast()
+        } else {
+            unsafe {
+                let layout = core::alloc::Layout::from_size_align_unchecked(count, 1);
+                let ptr = alloc::alloc::alloc(layout);
+                if ptr.is_null() {
+                    alloc::alloc::handle_alloc_error(layout);
+                }
+
+                ptr
+            }
+        };
+
+        // Perform memory block transfer
+        let status = (|| {
+            // Load to buffer
+            self.memory_access(
+                src,
+                buf,
+                count,
+                |perm| {
+                    matches!(
+                        perm,
+                        Permission::Readonly | Permission::Write | Permission::Exec
+                    )
+                },
+                |src, dst, count| core::ptr::copy(src, dst, count),
+                traph,
+            )
+            .map_err(|_| BlkCopyError::Load)?;
+
+            // Store from buffer
+            self.memory_access(
+                dst,
+                buf,
+                count,
+                |perm| perm == Permission::Write,
+                |dst, src, count| core::ptr::copy(src, dst, count),
+                traph,
+            )
+            .map_err(|_| BlkCopyError::Store)?;
+
+            Ok::<_, BlkCopyError>(())
+        })();
+
+        // Deallocate if used heap-allocated array
+        if count > STACK_BUFFER_SIZE {
+            alloc::alloc::dealloc(
+                buf,
+                core::alloc::Layout::from_size_align_unchecked(count, 1),
+            );
+        }
+
+        status
     }
 
     /// Split address to pages, check their permissions and feed pointers with offset
@@ -326,3 +371,31 @@ pub struct LoadError;
 /// Unhandled store access trap
 #[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
 pub struct StoreError;
+
+/// Unhandled block transfer trap
+#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
+pub enum BlkCopyError {
+    Load,
+    Store,
+}
+
+impl From<LoadError> for VmRunError {
+    fn from(_: LoadError) -> Self {
+        Self::LoadAccessEx
+    }
+}
+
+impl From<StoreError> for VmRunError {
+    fn from(_: StoreError) -> Self {
+        Self::StoreAccessEx
+    }
+}
+
+impl From<BlkCopyError> for VmRunError {
+    fn from(value: BlkCopyError) -> Self {
+        match value {
+            BlkCopyError::Load => Self::LoadAccessEx,
+            BlkCopyError::Store => Self::StoreAccessEx,
+        }
+    }
+}


@@ -26,7 +26,6 @@ use {
     value::Value,
 };
 
 /// Extract a parameter from program
 macro_rules! param {
     ($self:expr, $ty:ty) => {{
@@ -227,47 +226,30 @@ impl<'a, T: HandleTrap> Vm<'a, T> {
                    _ => 0,
                };
 
-                if self
-                    .memory
-                    .load(
-                        self.read_reg(base).as_u64() + off + n as u64,
-                        self.registers.as_mut_ptr().add(usize::from(dst) + n).cast(),
-                        usize::from(count).saturating_sub(n),
-                        &mut self.traph,
-                    )
-                    .is_err()
-                {
-                    return Err(VmRunError::LoadAccessEx);
-                }
+                self.memory.load(
+                    self.read_reg(base).as_u64() + off + n as u64,
+                    self.registers.as_mut_ptr().add(usize::from(dst) + n).cast(),
+                    usize::from(count).saturating_sub(n),
+                    &mut self.traph,
+                )?;
            }
            ST => {
                let ParamBBDH(dst, base, off, count) = param!(self, ParamBBDH);
-                if self
-                    .memory
-                    .store(
-                        self.read_reg(base).as_u64() + off,
-                        self.registers.as_ptr().add(usize::from(dst)).cast(),
-                        count.into(),
-                        &mut self.traph,
-                    )
-                    .is_err()
-                {
-                    return Err(VmRunError::LoadAccessEx);
-                }
+                self.memory.store(
+                    self.read_reg(base).as_u64() + off,
+                    self.registers.as_ptr().add(usize::from(dst)).cast(),
+                    count.into(),
+                    &mut self.traph,
+                )?;
            }
            BMC => {
                let ParamBBD(src, dst, count) = param!(self, ParamBBD);
-                if self
-                    .memory
-                    .block_copy(
-                        self.read_reg(src).as_u64(),
-                        self.read_reg(dst).as_u64(),
-                        count,
-                    )
-                    .is_err()
-                {
-                    return Err(VmRunError::LoadAccessEx);
-                }
+                self.memory.block_copy(
+                    self.read_reg(src).as_u64(),
+                    self.read_reg(dst).as_u64(),
+                    count as _,
+                    &mut self.traph,
+                )?;
            }
            BRC => {
                let ParamBBB(src, dst, count) = param!(self, ParamBBB);