
Von-Neumann?

Erin 2023-08-09 02:33:03 +02:00
parent eadf9e0a1f
commit 430ccd170d
5 changed files with 126 additions and 76 deletions

View file

@ -13,6 +13,9 @@ libfuzzer-sys = "0.4"
[dependencies.hbvm]
path = ".."
[dependencies.hbbytecode]
path = "../../hbbytecode"
# Prevent this from interfering with workspaces
[workspace]
members = ["."]

View file

@ -1,15 +1,30 @@
#![no_main]
use {
hbbytecode::valider::validate,
hbvm::{
mem::{HandlePageFault, Memory, MemoryAccessReason, PageSize},
Vm,
softpaging::{
paging::{PageTable, Permission},
HandlePageFault, PageSize, SoftPagedMem,
},
MemoryAccessReason, Vm,
},
libfuzzer_sys::fuzz_target,
};
fuzz_target!(|data: &[u8]| {
if let Ok(mut vm) = Vm::<_, 16384>::new_validated(data, TestTrapHandler, Default::default()) {
if validate(data).is_ok() {
let mut vm = unsafe {
Vm::<_, 16384>::new(
SoftPagedMem {
pf_handler: TestTrapHandler,
program: data,
root_pt: Box::into_raw(Default::default()),
},
0,
)
};
// Alloc and map some memory
let pages = [
alloc_and_map(&mut vm.memory, 0),
@ -26,22 +41,17 @@ fuzz_target!(|data: &[u8]| {
}
});
fn alloc_and_map(memory: &mut Memory, at: u64) -> *mut u8 {
fn alloc_and_map(memory: &mut SoftPagedMem<TestTrapHandler>, at: u64) -> *mut u8 {
let ptr = Box::into_raw(Box::<Page>::default()).cast();
unsafe {
memory
.map(
ptr,
at,
hbvm::mem::paging::Permission::Write,
PageSize::Size4K,
)
.map(ptr, at, Permission::Write, PageSize::Size4K)
.unwrap()
};
ptr
}
fn unmap_and_dealloc(memory: &mut Memory, ptr: *mut u8, from: u64) {
fn unmap_and_dealloc(memory: &mut SoftPagedMem<TestTrapHandler>, ptr: *mut u8, from: u64) {
memory.unmap(from).unwrap();
let _ = unsafe { Box::from_raw(ptr.cast::<Page>()) };
}
@ -59,7 +69,7 @@ impl HandlePageFault for TestTrapHandler {
fn page_fault(
&mut self,
_: MemoryAccessReason,
_: &mut Memory,
_: &mut PageTable,
_: u64,
_: PageSize,
_: *mut u8,

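The fuzz-target hunks above are fragmented by the diff, so here is a consolidated sketch of the flow this commit introduces: validate the bytecode with hbbytecode first, assemble a SoftPagedMem by hand, construct the Vm through the new unsafe constructor at entry point 0, map a scratch page, run, and tear everything down. This is an illustration only; `run_once`, `Page4K` and the generic handler parameter are hypothetical names, the real harness keeps its own page type and TestTrapHandler.

use hbvm::{
    softpaging::{paging::Permission, HandlePageFault, PageSize, SoftPagedMem},
    Vm,
};

// Hypothetical 4 KiB-aligned page buffer for illustration.
#[repr(align(4096))]
struct Page4K([u8; 4096]);

fn run_once<PfH: HandlePageFault>(program: &[u8], handler: PfH) {
    // Only build a VM for bytecode that passes validation.
    if hbbytecode::valider::validate(program).is_err() {
        return;
    }
    let mut vm = unsafe {
        Vm::<_, 16384>::new(
            SoftPagedMem {
                pf_handler: handler,
                program,
                root_pt: Box::into_raw(Default::default()),
            },
            0, // entry point
        )
    };
    // Map one writable 4 KiB page at guest address 0, run, then clean up.
    let page = Box::into_raw(Box::new(Page4K([0; 4096]))).cast::<u8>();
    unsafe { vm.memory.map(page, 0, Permission::Write, PageSize::Size4K) }.unwrap();
    let _ = vm.run();
    vm.memory.unmap(0).unwrap();
    let _ = unsafe { Box::from_raw(page.cast::<Page4K>()) };
}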
View file

@ -14,8 +14,6 @@
#![cfg_attr(feature = "nightly", feature(fn_align))]
#![warn(missing_docs, clippy::missing_docs_in_private_items)]
use core::marker::PhantomData;
#[cfg(feature = "alloc")]
extern crate alloc;
@ -26,16 +24,14 @@ mod bmc;
use {
bmc::BlockCopier,
core::{cmp::Ordering, mem::size_of, ops},
core::{cmp::Ordering, mem::size_of, ops, slice::SliceIndex},
derive_more::Display,
hbbytecode::{
valider, OpParam, ParamBB, ParamBBB, ParamBBBB, ParamBBD, ParamBBDH, ParamBBW, ParamBD,
},
hbbytecode::{OpParam, ParamBB, ParamBBB, ParamBBBB, ParamBBD, ParamBBDH, ParamBBW, ParamBD},
value::{Value, ValueVariant},
};
/// HoleyBytes Virtual Machine
pub struct Vm<'a, Mem, const TIMER_QUOTIENT: usize> {
pub struct Vm<Mem, const TIMER_QUOTIENT: usize> {
/// Holds 256 registers
///
/// Writing to register 0 is considered undefined behaviour
@ -48,15 +44,6 @@ pub struct Vm<'a, Mem, const TIMER_QUOTIENT: usize> {
/// Program counter
pub pc: usize,
/// Program
program: *const u8,
/// Cached program length (without unreachable end)
program_len: usize,
/// Program lifetime
_program_lt: PhantomData<&'a [u8]>,
/// Program timer
timer: usize,
@ -64,7 +51,7 @@ pub struct Vm<'a, Mem, const TIMER_QUOTIENT: usize> {
copier: Option<BlockCopier>,
}
impl<'a, Mem, const TIMER_QUOTIENT: usize> Vm<'a, Mem, TIMER_QUOTIENT>
impl<Mem, const TIMER_QUOTIENT: usize> Vm<Mem, TIMER_QUOTIENT>
where
Mem: Memory,
{
@ -72,25 +59,16 @@ where
///
/// # Safety
/// Program code has to be validated
pub unsafe fn new_unchecked(program: &'a [u8], memory: Mem) -> Self {
pub unsafe fn new(memory: Mem, entry: u64) -> Self {
Self {
registers: [Value::from(0_u64); 256],
memory,
pc: 0,
program_len: program.len() - 12,
program: program[4..].as_ptr(),
_program_lt: Default::default(),
pc: entry as _,
timer: 0,
copier: None,
}
}
/// Create a new VM with program and trap handler only if it passes validation
pub fn new_validated(program: &'a [u8], memory: Mem) -> Result<Self, valider::Error> {
valider::validate(program)?;
Ok(unsafe { Self::new_unchecked(program, memory) })
}
/// Execute program
///
/// Program can return [`VmRunError`] if a trap handling failed
@ -98,11 +76,6 @@ where
pub fn run(&mut self) -> Result<VmRunOk, VmRunError> {
use hbbytecode::opcode::*;
loop {
// Check instruction boundary
if self.pc >= self.program_len {
return Err(VmRunError::AddrOutOfBounds);
}
// Big match
//
// Contribution guide:
@ -123,7 +96,11 @@ where
// - Yes, we assume you run 64 bit CPU. Else ?conradluget a better CPU
// sorry 8 bit fans, HBVM won't run on your Speccy :(
unsafe {
match *self.program.add(self.pc) {
match *self
.memory
.load_prog(self.pc)
.ok_or(VmRunError::ProgramFetchLoadEx(self.pc as _))?
{
UN => {
self.decode::<()>();
return Err(VmRunError::Unreachable);
@ -388,15 +365,22 @@ where
}
/// Decode instruction operands
#[inline]
#[inline(always)]
unsafe fn decode<T: OpParam>(&mut self) -> T {
let data = self.program.add(self.pc + 1).cast::<T>().read();
let pc1 = self.pc + 1;
let data = self
.memory
.load_prog_unchecked(pc1..pc1 + size_of::<T>())
.as_ptr()
.cast::<T>()
.read();
self.pc += 1 + size_of::<T>();
data
}
/// Perform binary operating over two registers
#[inline]
#[inline(always)]
unsafe fn binary_op<T: ValueVariant>(&mut self, op: impl Fn(T, T) -> T) {
let ParamBBB(tg, a0, a1) = self.decode();
self.write_reg(
@ -406,7 +390,7 @@ where
}
/// Perform binary operation over register and immediate
#[inline]
#[inline(always)]
unsafe fn binary_op_imm<T: ValueVariant>(&mut self, op: impl Fn(T, T) -> T) {
let ParamBBD(tg, reg, imm) = self.decode();
self.write_reg(
@ -416,14 +400,14 @@ where
}
/// Perform binary operation over register and shift immediate
#[inline]
#[inline(always)]
unsafe fn binary_op_ims<T: ValueVariant>(&mut self, op: impl Fn(T, u32) -> T) {
let ParamBBW(tg, reg, imm) = self.decode();
self.write_reg(tg, op(self.read_reg(reg).cast::<T>(), imm));
}
/// Jump at `#3` if ordering on `#0 <=> #1` is equal to expected
#[inline]
#[inline(always)]
unsafe fn cond_jmp<T: ValueVariant + Ord>(&mut self, expected: Ordering) {
let ParamBBD(a0, a1, ja) = self.decode();
if self
@ -437,14 +421,14 @@ where
}
/// Read register
#[inline]
#[inline(always)]
unsafe fn read_reg(&self, n: u8) -> Value {
*self.registers.get_unchecked(n as usize)
}
/// Write a register.
/// Writing to register 0 is no-op.
#[inline]
#[inline(always)]
unsafe fn write_reg(&mut self, n: u8, value: impl Into<Value>) {
if n != 0 {
*self.registers.get_unchecked_mut(n as usize) = value.into();
@ -452,7 +436,7 @@ where
}
/// Load / Store Address check-computation überfunction
#[inline]
#[inline(always)]
unsafe fn ldst_addr_uber(
&self,
dst: u8,
@ -485,6 +469,9 @@ pub enum VmRunError {
/// Unhandled load access exception
LoadAccessEx(u64),
/// Unhandled instruction load access exception
ProgramFetchLoadEx(u64),
/// Unhandled store access exception
StoreAccessEx(u64),
@ -529,6 +516,40 @@ pub trait Memory {
source: *const u8,
count: usize,
) -> Result<(), StoreError>;
/// Fetch bytes from program section
///
/// # Why?
/// Even though Holey Bytes programs operate within a
/// single address space, the actual implementation
/// may differ, which is why program fetches go through a
/// separate function.
///
/// Also if your memory implementation differentiates between
/// readable and executable memory, this is the way to distinguish
/// the loads.
///
/// # Notice for implementors
/// This is a hot function, called on each opcode fetch and
/// instruction decode. Inlining the implementation is highly
/// recommended!
///
/// If you use a heavier memory implementation, consider
/// adding a cache, as HBVM does not do that for you.
///
/// Has to return all the requested data. If data of the requested
/// length cannot be fetched, return [`None`].
fn load_prog<I>(&mut self, index: I) -> Option<&I::Output>
where
I: SliceIndex<[u8]>;
/// Fetch bytes from program section, unchecked.
///
/// # Safety
/// You really have to be sure you get the bytes, got me?
unsafe fn load_prog_unchecked<I>(&mut self, index: I) -> &I::Output
where
I: SliceIndex<[u8]>;
}
/// Unhandled load access trap

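Since the Memory trait now owns the program-fetch path, here is a minimal sketch of how another implementor might back these two hooks, modelled on the SoftPagedMem implementation later in this diff: a hypothetical FlatProgram that serves both methods with plain slice indexing. Only the two new methods are shown (as inherent methods for brevity); a real Memory implementor would also provide the load/store methods, which are elided here.

use core::slice::SliceIndex;

// Hypothetical implementor holding the program as a flat byte buffer.
struct FlatProgram {
    program: Vec<u8>,
}

impl FlatProgram {
    /// Checked fetch: `None` if the requested range falls outside the program.
    #[inline(always)] // hot path: hit on every opcode fetch and operand decode
    fn load_prog<I>(&mut self, index: I) -> Option<&I::Output>
    where
        I: SliceIndex<[u8]>,
    {
        self.program.get(index)
    }

    /// Unchecked fetch: the caller must guarantee the range is in bounds.
    #[inline(always)]
    unsafe fn load_prog_unchecked<I>(&mut self, index: I) -> &I::Output
    where
        I: SliceIndex<[u8]>,
    {
        self.program.get_unchecked(index)
    }
}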
View file

@ -10,15 +10,20 @@ use {
fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut prog = vec![];
stdin().read_to_end(&mut prog)?;
println!("{prog:?}");
if let Err(e) = validate(&prog) {
eprintln!("Program validation error: {e:?}");
return Ok(());
} else {
unsafe {
let mut vm =
Vm::<_, 0>::new_unchecked(&prog, SoftPagedMem::<TestTrapHandler>::default());
let mut vm = Vm::<_, 0>::new(
SoftPagedMem {
pf_handler: TestTrapHandler,
program: &prog,
root_pt: Box::into_raw(Default::default()),
},
0,
);
let data = {
let ptr = std::alloc::alloc_zeroed(std::alloc::Layout::from_size_align_unchecked(
4096, 4096,
@ -32,7 +37,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
vm.memory
.map(
data,
0,
8192,
hbvm::softpaging::paging::Permission::Write,
PageSize::Size4K,
)
@ -46,7 +51,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
data,
std::alloc::Layout::from_size_align_unchecked(4096, 4096),
);
vm.memory.unmap(0).unwrap();
vm.memory.unmap(8192).unwrap();
}
}
Ok(())

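The main.rs hunks above are also split up, so here is the scratch-page lifecycle in one piece, as a hedged sketch: allocate a zeroed, page-aligned host buffer, map it writable at guest address 8192 (the page no longer sits at 0), run the VM, then unmap and free the buffer. `run_with_scratch_page` is a hypothetical helper; the real main() does this inline.

use hbvm::{
    softpaging::{paging::Permission, HandlePageFault, PageSize, SoftPagedMem},
    Vm,
};

fn run_with_scratch_page<PfH: HandlePageFault>(vm: &mut Vm<SoftPagedMem<'_, PfH>, 0>) {
    unsafe {
        // 4 KiB, 4 KiB-aligned, zero-initialised host page.
        let layout = std::alloc::Layout::from_size_align_unchecked(4096, 4096);
        let data = std::alloc::alloc_zeroed(layout);

        // Expose it to the guest as a writable 4 KiB page at address 8192.
        vm.memory
            .map(data, 8192, Permission::Write, PageSize::Size4K)
            .unwrap();

        let _ = vm.run();

        // Tear down: unmap the guest page, then free the host allocation.
        vm.memory.unmap(8192).unwrap();
        std::alloc::dealloc(data, layout);
    }
}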
View file

@ -4,6 +4,7 @@ pub mod paging;
use {
super::{LoadError, Memory, MemoryAccessReason, StoreError},
core::slice::SliceIndex,
derive_more::Display,
paging::{PageTable, Permission},
};
@ -13,14 +14,16 @@ use {alloc::boxed::Box, paging::PtEntry};
/// HoleyBytes software paged memory
#[derive(Clone, Debug)]
pub struct SoftPagedMem<PfHandler> {
pub struct SoftPagedMem<'p, PfH> {
/// Root page table
pub root_pt: *mut PageTable,
/// Page fault handler
pub pf_handler: PfHandler,
pub pf_handler: PfH,
/// Program memory segment
pub program: &'p [u8],
}
impl<PfHandler: HandlePageFault> Memory for SoftPagedMem<PfHandler> {
impl<'p, PfH: HandlePageFault> Memory for SoftPagedMem<'p, PfH> {
/// Load value from an address
///
/// # Safety
@ -57,9 +60,27 @@ impl<PfHandler: HandlePageFault> Memory for SoftPagedMem<PfHandler> {
)
.map_err(StoreError)
}
/// Fetch slice from program memory section
#[inline(always)]
fn load_prog<I>(&mut self, index: I) -> Option<&I::Output>
where
I: SliceIndex<[u8]>,
{
self.program.get(index)
}
/// Fetch slice from program memory section, unchecked!
#[inline(always)]
unsafe fn load_prog_unchecked<I>(&mut self, index: I) -> &I::Output
where
I: SliceIndex<[u8]>,
{
self.program.get_unchecked(index)
}
}
impl<PfHandler: HandlePageFault> SoftPagedMem<PfHandler> {
impl<'p, PfH: HandlePageFault> SoftPagedMem<'p, PfH> {
// Everyone behold, the holy function, the god of HBVM memory accesses!
/// Split address to pages, check their permissions and feed pointers with offset
@ -239,24 +260,14 @@ impl Iterator for AddrPageLookuper {
}
#[cfg(feature = "alloc")]
impl<PfHandler: Default> Default for SoftPagedMem<PfHandler> {
fn default() -> Self {
Self {
root_pt: Box::into_raw(Default::default()),
pf_handler: Default::default(),
}
}
}
#[cfg(feature = "alloc")]
impl<A> Drop for SoftPagedMem<A> {
impl<'p, A> Drop for SoftPagedMem<'p, A> {
fn drop(&mut self) {
let _ = unsafe { Box::from_raw(self.root_pt) };
}
}
#[cfg(feature = "alloc")]
impl<A> SoftPagedMem<A> {
impl<'p, A> SoftPagedMem<'p, A> {
/// Maps host's memory into VM's memory
///
/// # Safety
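With the Default impl removed (there is no sensible default for the program slice), a SoftPagedMem is now assembled by hand, as the fuzz target and main.rs in this commit do. A minimal sketch with a hypothetical helper name; note that the Drop impl shown above reclaims the root page table, so host pages you map yourself still have to be unmapped and freed by the caller, as both examples in this commit do.

use hbvm::softpaging::{HandlePageFault, SoftPagedMem};

/// Hypothetical helper: build a SoftPagedMem by hand now that `Default` is gone.
/// The program slice is borrowed from the caller; a fresh root page table is allocated.
fn make_memory<PfH: HandlePageFault>(program: &[u8], pf_handler: PfH) -> SoftPagedMem<'_, PfH> {
    SoftPagedMem {
        root_pt: Box::into_raw(Default::default()),
        pf_handler,
        program,
    }
}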