Compare commits

...

9 commits

Author SHA1 Message Date
Erin ce878c2319 a 2023-06-25 00:15:55 +02:00
Erin 498e729c90 Block memory copy 2023-06-25 00:07:53 +02:00
Erin 6356b7dd24 more docs 2023-06-23 23:55:41 +02:00
Erin a47b556317 comments 2023-06-23 23:49:53 +02:00
Erin b237ad90ba u 2023-06-23 09:50:39 +02:00
Erin 4fd96e1b0e interrupt handling 2023-06-23 09:44:26 +02:00
Erin 965a6a2c19 redone page size thing 2023-06-22 11:55:33 +02:00
Erin a08e0172c9 changed sigs 2023-06-22 11:43:32 +02:00
Erin 60ca26dcd2 table 2023-06-22 11:37:48 +02:00
10 changed files with 481 additions and 142 deletions

Cargo.lock (generated): 35 lines changed
View file

@ -25,6 +25,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "delegate"
version = "0.9.0"
@ -36,6 +42,19 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 1.0.109",
]
[[package]]
name = "fnv"
version = "1.0.7"
@ -70,6 +89,7 @@ name = "hbvm"
version = "0.1.0"
dependencies = [
"delegate",
"derive_more",
"hashbrown",
"hbbytecode",
"log",
@ -163,6 +183,21 @@ version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "static_assertions"
version = "1.1.0"

View file

@ -8,6 +8,7 @@ lto = true
[dependencies]
delegate = "0.9"
derive_more = "0.99"
hashbrown = "0.13"
hbbytecode.path = "../hbbytecode"
log = "0.4"

View file

@ -3,20 +3,3 @@ extern crate alloc;
pub mod validate;
pub mod vm;
#[derive(Debug, PartialEq)]
pub enum RuntimeErrors {
InvalidOpcodePair(u8, u8),
RegisterTooSmall,
HostError(u64),
PageNotMapped(u64),
InvalidJumpAddress(u64),
InvalidSystemCall(u8),
}
// If you solve the halting problem feel free to remove this
#[derive(PartialEq, Debug)]
pub enum HaltStatus {
Halted,
Running,
}

View file

@ -1,3 +1,9 @@
use hbvm::vm::{
mem::{Memory, MemoryAccessReason, PageSize},
trap::HandleTrap,
value::Value,
};
use {
hbvm::{validate::validate, vm::Vm},
std::io::{stdin, Read},
@ -6,13 +12,13 @@ use {
fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut prog = vec![];
stdin().read_to_end(&mut prog)?;
if let Err(e) = validate(&prog) {
eprintln!("Program validation error: {e:?}");
return Ok(());
} else {
unsafe {
let mut vm = Vm::new_unchecked(&prog);
let mut vm = Vm::new_unchecked(&prog, TestTrapHandler);
vm.memory.insert_test_page();
println!("Program interrupt: {:?}", vm.run());
println!("{:?}", vm.registers);
@ -24,3 +30,30 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
pub fn time() -> u32 {
9
}
struct TestTrapHandler;
impl HandleTrap for TestTrapHandler {
fn page_fault(
&mut self,
_: MemoryAccessReason,
_: &mut Memory,
_: u64,
_: PageSize,
_: *mut u8,
) -> bool {
false
}
fn invalid_op(&mut self, _: &mut [Value; 256], _: &mut usize, _: &mut Memory, _: u8) -> bool
where
Self: Sized,
{
false
}
fn ecall(&mut self, _: &mut [Value; 256], _: &mut usize, _: &mut Memory)
where
Self: Sized,
{
}
}

View file

@ -1,21 +1,31 @@
/// Program validation error kind
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ErrorKind {
/// Unknown opcode
InvalidInstruction,
/// VM doesn't implement this valid opcode
Unimplemented,
/// Attempted to copy over register boundary
RegisterArrayOverflow,
}
/// Error
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Error {
/// Kind
pub kind: ErrorKind,
/// Location in bytecode
pub index: usize,
}
/// Perform bytecode validation. If it passes, the program should be
/// sound to execute.
pub fn validate(mut program: &[u8]) -> Result<(), Error> {
use hbbytecode::opcode::*;
let start = program;
loop {
// Match on instruction types and perform necessary checks
program = match program {
[] => return Ok(()),
[LD..=ST, reg, _, _, _, _, _, _, _, _, _, count, ..]
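
As a usage illustration (not part of this diff), a minimal sketch of how an embedder might report the documented Error { kind, index } fields; the helper name report is hypothetical:

fn report(program: &[u8]) {
    match hbvm::validate::validate(program) {
        Ok(()) => println!("program is valid"),
        // `index` is the byte offset into the bytecode where validation failed
        Err(e) => eprintln!("{:?} at byte offset {}", e.kind, e.index),
    }
}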

View file

@ -1,10 +1,16 @@
mod paging;
use self::paging::{PageTable, Permission, PtEntry};
use alloc::boxed::Box;
use core::mem::MaybeUninit;
use self::paging::{PageTable, Permission, PtEntry};
use super::{trap::HandleTrap, VmRunError};
use alloc::boxed::Box;
use derive_more::Display;
/// HoleyBytes virtual memory
#[derive(Clone, Debug)]
pub struct Memory {
/// Root page table
root_pt: *mut PageTable,
}
@ -47,13 +53,23 @@ impl Memory {
entry = PtEntry::new(Box::into_raw(pt) as _, Permission::Node);
}
self.root_pt_mut()[0] = entry;
(*self.root_pt)[0] = entry;
}
}
/// Load value from an address
pub unsafe fn load(&self, addr: u64, target: *mut u8, count: usize) -> Result<(), ()> {
///
/// # Safety
/// Applies same conditions as for [`core::ptr::copy_nonoverlapping`]
pub unsafe fn load(
&mut self,
addr: u64,
target: *mut u8,
count: usize,
traph: &mut impl HandleTrap,
) -> Result<(), LoadError> {
self.memory_access(
MemoryAccessReason::Load,
addr,
target,
count,
@ -64,96 +80,193 @@ impl Memory {
)
},
|src, dst, count| core::ptr::copy_nonoverlapping(src, dst, count),
traph,
)
.map_err(|_| LoadError)
}
/// Store value to an address
pub unsafe fn store(&mut self, addr: u64, source: *const u8, count: usize) -> Result<(), ()> {
///
/// # Safety
/// Applies same conditions as for [`core::ptr::copy_nonoverlapping`]
pub unsafe fn store(
&mut self,
addr: u64,
source: *const u8,
count: usize,
traph: &mut impl HandleTrap,
) -> Result<(), StoreError> {
self.memory_access(
MemoryAccessReason::Store,
addr,
source.cast_mut(),
count,
|perm| perm == Permission::Write,
|dst, src, count| core::ptr::copy_nonoverlapping(src, dst, count),
traph,
)
.map_err(|_| StoreError)
}
/// Copy a block of memory
pub unsafe fn block_copy(&mut self, src: u64, dst: u64, count: u64) -> Result<(), ()> {
let count = usize::try_from(count).expect("?conradluget a better CPU");
///
/// # Safety
/// - Same as for [`Self::load`] and [`Self::store`]
/// - Your faith in the gods of UB
/// - Addr-san claims it's fine but who knows if she isn't lying :ferrisSus:
pub unsafe fn block_copy(
&mut self,
src: u64,
dst: u64,
count: usize,
traph: &mut impl HandleTrap,
) -> Result<(), MemoryAccessReason> {
// Yeah, I know it is possible to do this more efficiently, but I am too lazy.
let mut srcs = PageSplitter::new(src, count, self.root_pt);
let mut dsts = PageSplitter::new(dst, count, self.root_pt);
let mut c_src = srcs.next().ok_or(())?;
let mut c_dst = dsts.next().ok_or(())?;
const STACK_BUFFER_SIZE: usize = 512;
loop {
let min_size = c_src.size.min(c_dst.size);
// Decide whether to use the stack-allocated buffer or to heap-allocate
// Deallocation is likewise decided by size at the end of the function
let mut buf = MaybeUninit::<[u8; STACK_BUFFER_SIZE]>::uninit();
let buf = if count <= STACK_BUFFER_SIZE {
buf.as_mut_ptr().cast()
} else {
unsafe {
core::ptr::copy(c_src.ptr, c_dst.ptr, min_size);
}
let layout = core::alloc::Layout::from_size_align_unchecked(count, 1);
let ptr = alloc::alloc::alloc(layout);
if ptr.is_null() {
alloc::alloc::handle_alloc_error(layout);
}
match (
match c_src.size.saturating_sub(min_size) {
0 => srcs.next(),
size => Some(PageSplitResult { size, ..c_src }),
},
match c_dst.size.saturating_sub(min_size) {
0 => dsts.next(),
size => Some(PageSplitResult { size, ..c_dst }),
},
) {
(None, None) => return Ok(()),
(Some(src), Some(dst)) => (c_src, c_dst) = (src, dst),
_ => return Err(()),
ptr
}
};
// Perform memory block transfer
let status = (|| {
// Load to buffer
self.memory_access(
MemoryAccessReason::Load,
src,
buf,
count,
|perm| {
matches!(
perm,
Permission::Readonly | Permission::Write | Permission::Exec
)
},
|src, dst, count| core::ptr::copy(src, dst, count),
traph,
)
.map_err(|_| MemoryAccessReason::Load)?;
// Store from buffer
self.memory_access(
MemoryAccessReason::Store,
dst,
buf,
count,
|perm| perm == Permission::Write,
|dst, src, count| core::ptr::copy(src, dst, count),
traph,
)
.map_err(|_| MemoryAccessReason::Store)?;
Ok::<_, MemoryAccessReason>(())
})();
// Deallocate if used heap-allocated array
if count > STACK_BUFFER_SIZE {
alloc::alloc::dealloc(
buf,
core::alloc::Layout::from_size_align_unchecked(count, 1),
);
}
status
}
#[inline]
pub fn root_pt(&self) -> &PageTable {
unsafe { &*self.root_pt }
}
#[inline]
pub fn root_pt_mut(&mut self) -> &mut PageTable {
unsafe { &mut *self.root_pt }
}
/// Split an address range into pages, check their permissions and feed pointers with offset
/// to a specified function.
///
/// If a page is not found, execute the page fault trap handler.
#[allow(clippy::too_many_arguments)] // Silence peasant
fn memory_access(
&self,
&mut self,
reason: MemoryAccessReason,
src: u64,
mut dst: *mut u8,
len: usize,
permission_check: impl Fn(Permission) -> bool,
action: impl Fn(*mut u8, *mut u8, usize),
permission_check: fn(Permission) -> bool,
action: fn(*mut u8, *mut u8, usize),
traph: &mut impl HandleTrap,
) -> Result<(), ()> {
for PageSplitResult { ptr, size, perm } in PageSplitter::new(src, len, self.root_pt) {
if !permission_check(perm) {
return Err(());
let mut pspl = AddrSplitter::new(src, len, self.root_pt);
loop {
match pspl.next() {
// Page found
Some(Ok(AddrSplitOk { ptr, size, perm })) => {
if !permission_check(perm) {
return Err(());
}
// Perform memory action and bump dst pointer
action(ptr, dst, size);
dst = unsafe { dst.add(size) };
}
Some(Err(AddrSplitError { addr, size })) => {
// Execute page fault handler
if traph.page_fault(reason, self, addr, size, dst) {
// Shift the splitter address
pspl.bump(size);
// Bump dst pointer
dst = unsafe { dst.add(size as _) };
} else {
return Err(()); // Unhandleable
}
}
None => return Ok(()),
}
action(ptr, dst, size);
dst = unsafe { dst.add(size) };
}
Ok(())
}
}
struct PageSplitResult {
/// Result from address split
struct AddrSplitOk {
/// Pointer to the start of the region to operate on
ptr: *mut u8,
/// Size up to the end of the page or to the end of the requested range
size: usize,
/// Page permission
perm: Permission,
}
struct PageSplitter {
struct AddrSplitError {
/// Address of failure
addr: u64,
/// Requested page size
size: PageSize,
}
/// Address splitter into pages
struct AddrSplitter {
/// Current address
addr: u64,
/// Size left
size: usize,
/// Page table
pagetable: *const PageTable,
}
impl PageSplitter {
impl AddrSplitter {
/// Create a new page splitter
pub const fn new(addr: u64, size: usize, pagetable: *const PageTable) -> Self {
Self {
addr,
@ -161,19 +274,29 @@ impl PageSplitter {
pagetable,
}
}
/// Bump the address by the given page size
fn bump(&mut self, page_size: PageSize) {
self.addr += page_size as u64;
self.size = self.size.saturating_sub(page_size as _);
}
}
impl Iterator for PageSplitter {
type Item = PageSplitResult;
impl Iterator for AddrSplitter {
type Item = Result<AddrSplitOk, AddrSplitError>;
fn next(&mut self) -> Option<Self::Item> {
// The end, everything is fine
if self.size == 0 {
return None;
}
let (base, perm, size, offset) = 'a: {
let mut current_pt = self.pagetable;
// Walk the page table
for lvl in (0..5).rev() {
// Get an entry
unsafe {
let entry = (*current_pt).get_unchecked(
usize::try_from((self.addr >> (lvl * 9 + 12)) & ((1 << 9) - 1))
@ -182,34 +305,102 @@ impl Iterator for PageSplitter {
let ptr = entry.ptr();
match entry.permission() {
Permission::Empty => return None,
// No page → page fault
Permission::Empty => {
return Some(Err(AddrSplitError {
addr: self.addr,
size: PageSize::from_lvl(lvl)?,
}))
}
// Node → proceed walking
Permission::Node => current_pt = ptr as _,
// Leaf → return relevant data
perm => {
break 'a (
// Pointer in host memory
ptr as *mut u8,
perm,
match lvl {
0 => 4096,
1 => 1024_usize.pow(2) * 2,
2 => 1024_usize.pow(3),
_ => return None,
},
PageSize::from_lvl(lvl)?,
// In-page offset
self.addr as usize & ((1 << (lvl * 9 + 12)) - 1),
)
);
}
}
}
}
return None;
return None; // Reached the end (should not happen)
};
let avail = (size - offset).clamp(0, self.size);
self.addr += size as u64;
self.size = self.size.saturating_sub(size);
Some(PageSplitResult {
ptr: unsafe { base.add(offset) },
// Get available byte count in the selected page with offset
let avail = (size as usize - offset).clamp(0, self.size);
self.bump(size);
Some(Ok(AddrSplitOk {
ptr: unsafe { base.add(offset) }, // Return pointer to the start of region
size: avail,
perm,
})
}))
}
}
/// Page size
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PageSize {
/// 4 KiB page (on level 0)
Size4K = 4096,
/// 2 MiB page (on level 1)
Size2M = 1024 * 1024 * 2,
/// 1 GiB page (on level 2)
Size1G = 1024 * 1024 * 1024,
}
impl PageSize {
/// Convert page table level to size of page
fn from_lvl(lvl: u8) -> Option<Self> {
match lvl {
0 => Some(PageSize::Size4K),
1 => Some(PageSize::Size2M),
2 => Some(PageSize::Size1G),
_ => None,
}
}
}
/// Unhandled load access trap
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub struct LoadError;
/// Unhandled store access trap
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub struct StoreError;
#[derive(Clone, Copy, Display, Debug, PartialEq, Eq)]
pub enum MemoryAccessReason {
Load,
Store,
}
impl From<MemoryAccessReason> for VmRunError {
fn from(value: MemoryAccessReason) -> Self {
match value {
MemoryAccessReason::Load => Self::LoadAccessEx,
MemoryAccessReason::Store => Self::StoreAccessEx,
}
}
}
impl From<LoadError> for VmRunError {
fn from(_: LoadError) -> Self {
Self::LoadAccessEx
}
}
impl From<StoreError> for VmRunError {
fn from(_: StoreError) -> Self {
Self::StoreAccessEx
}
}
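
Since LoadError, StoreError and MemoryAccessReason all convert into VmRunError, callers can propagate them with `?`. A minimal sketch under that assumption (the helper copy_byte is hypothetical, not part of this diff):

use hbvm::vm::{mem::Memory, trap::HandleTrap, VmRunError};

/// Copy a single byte from one virtual address to another.
unsafe fn copy_byte(
    mem: &mut Memory,
    traph: &mut impl HandleTrap,
    from: u64,
    to: u64,
) -> Result<(), VmRunError> {
    let mut byte = 0u8;
    mem.load(from, &mut byte, 1, traph)?;  // LoadError → VmRunError::LoadAccessEx
    mem.store(to, &byte, 1, traph)?;       // StoreError → VmRunError::StoreAccessEx
    mem.block_copy(from, to, 1, traph)?;   // MemoryAccessReason → Load/StoreAccessEx
    Ok(())
}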

View file

@ -6,30 +6,40 @@ use core::{
};
use delegate::delegate;
/// Page entry permission
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[repr(u8)]
pub enum Permission {
/// No page present
#[default]
Empty,
/// Points to another pagetable
Node,
/// Page is read only
Readonly,
/// Page is readable and writable
Write,
/// Page is readable and executable
Exec,
}
/// Page table entry
#[derive(Clone, Copy, Default, PartialEq, Eq)]
pub struct PtEntry(u64);
impl PtEntry {
/// Create new
#[inline]
pub unsafe fn new(ptr: *mut PtPointedData, permission: Permission) -> Self {
Self(ptr as u64 | permission as u64)
}
/// Get permission
#[inline]
pub fn permission(&self) -> Permission {
unsafe { core::mem::transmute(self.0 as u8 & 0b111) }
}
/// Get pointer to the data (leaf) or next page table (node)
#[inline]
pub fn ptr(&self) -> *mut PtPointedData {
(self.0 & !((1 << 12) - 1)) as _
@ -45,6 +55,8 @@ impl Debug for PtEntry {
}
}
/// Page table
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(align(4096))]
pub struct PageTable([PtEntry; 512]);
@ -89,13 +101,17 @@ where
impl Default for PageTable {
fn default() -> Self {
// SAFETY: It's fine, zeroed page table entry is valid (= empty)
Self(unsafe { MaybeUninit::zeroed().assume_init() })
}
}
/// Data that a page table entry can point to
#[derive(Clone, Copy)]
#[repr(C, align(4096))]
pub union PtPointedData {
/// Node - next page table
pub pt: PageTable,
/// Leaf
pub page: u8,
}

View file

@ -11,8 +11,11 @@
// program size. If you are (rightfully) worried about the UB, for now just
// append your program with 11 zeroes.
mod mem;
mod value;
use self::trap::HandleTrap;
pub mod mem;
pub mod trap;
pub mod value;
use {
crate::validate,
@ -23,6 +26,7 @@ use {
value::Value,
};
/// Extract a parameter from program
macro_rules! param {
($self:expr, $ty:ty) => {{
assert_impl_one!($ty: OpParam);
@ -37,6 +41,7 @@ macro_rules! param {
}};
}
/// Perform binary operation `#0 ← #1 OP #2`
macro_rules! binary_op {
($self:expr, $ty:ident, $handler:expr) => {{
let ParamBBB(tg, a0, a1) = param!($self, ParamBBB);
@ -51,6 +56,7 @@ macro_rules! binary_op {
}};
}
/// Perform binary operation with immediate `#0 ← #1 OP imm #2`
macro_rules! binary_op_imm {
($self:expr, $ty:ident, $handler:expr) => {{
let ParamBBD(tg, a0, imm) = param!($self, ParamBBD);
@ -61,6 +67,7 @@ macro_rules! binary_op_imm {
}};
}
/// Jump at `#3` if ordering on `#0 <=> #1` is equal to expected
macro_rules! cond_jump {
($self:expr, $ty:ident, $expected:ident) => {{
let ParamBBD(a0, a1, jt) = param!($self, ParamBBD);
@ -72,36 +79,59 @@ macro_rules! cond_jump {
}};
}
pub struct Vm<'a> {
/// HoleyBytes Virtual Machine
pub struct Vm<'a, T> {
/// Holds 256 registers
///
/// Writing to register 0 is considered undefined behaviour
/// in terms of HoleyBytes program execution
pub registers: [Value; 256],
/// Memory implementation
pub memory: Memory,
/// Trap handler
pub traph: T,
/// Program counter
pc: usize,
/// Program
program: &'a [u8],
}
impl<'a> Vm<'a> {
impl<'a, T: HandleTrap> Vm<'a, T> {
/// Create a new VM with program and trap handler
///
/// # Safety
/// Program code has to be validated
pub unsafe fn new_unchecked(program: &'a [u8]) -> Self {
pub unsafe fn new_unchecked(program: &'a [u8], traph: T) -> Self {
Self {
registers: [Value::from(0_u64); 256],
memory: Default::default(),
traph,
pc: 0,
program,
}
}
pub fn new_validated(program: &'a [u8]) -> Result<Self, validate::Error> {
/// Create a new VM with program and trap handler only if the program passes validation
pub fn new_validated(program: &'a [u8], traph: T) -> Result<Self, validate::Error> {
validate::validate(program)?;
Ok(unsafe { Self::new_unchecked(program) })
Ok(unsafe { Self::new_unchecked(program, traph) })
}
pub fn run(&mut self) -> HaltReason {
/// Execute program
///
/// Program can return [`VmRunError`] if trap handling failed
pub fn run(&mut self) -> Result<(), VmRunError> {
use hbbytecode::opcode::*;
loop {
// Fetch instruction
let Some(&opcode) = self.program.get(self.pc)
else { return HaltReason::ProgramEnd };
else { return Ok(()) };
// Big match
unsafe {
match opcode {
NOP => param!(self, ()),
@ -196,45 +226,30 @@ impl<'a> Vm<'a> {
_ => 0,
};
if self
.memory
.load(
self.read_reg(base).as_u64() + off + n as u64,
self.registers.as_mut_ptr().add(usize::from(dst) + n).cast(),
usize::from(count).saturating_sub(n),
)
.is_err()
{
return HaltReason::LoadAccessEx;
}
self.memory.load(
self.read_reg(base).as_u64() + off + n as u64,
self.registers.as_mut_ptr().add(usize::from(dst) + n).cast(),
usize::from(count).saturating_sub(n),
&mut self.traph,
)?;
}
ST => {
let ParamBBDH(dst, base, off, count) = param!(self, ParamBBDH);
if self
.memory
.store(
self.read_reg(base).as_u64() + off,
self.registers.as_ptr().add(usize::from(dst)).cast(),
count.into(),
)
.is_err()
{
return HaltReason::LoadAccessEx;
}
self.memory.store(
self.read_reg(base).as_u64() + off,
self.registers.as_ptr().add(usize::from(dst)).cast(),
count.into(),
&mut self.traph,
)?;
}
BMC => {
let ParamBBD(src, dst, count) = param!(self, ParamBBD);
if self
.memory
.block_copy(
self.read_reg(src).as_u64(),
self.read_reg(dst).as_u64(),
count,
)
.is_err()
{
return HaltReason::LoadAccessEx;
}
self.memory.block_copy(
self.read_reg(src).as_u64(),
self.read_reg(dst).as_u64(),
count as _,
&mut self.traph,
)?;
}
BRC => {
let ParamBBB(src, dst, count) = param!(self, ParamBBB);
@ -261,7 +276,8 @@ impl<'a> Vm<'a> {
JGTU => cond_jump!(self, sint, Greater),
ECALL => {
param!(self, ());
return HaltReason::Ecall;
self.traph
.ecall(&mut self.registers, &mut self.pc, &mut self.memory);
}
ADDF => binary_op!(self, as_f64, ops::Add::add),
MULF => binary_op!(self, as_f64, ops::Mul::mul),
@ -274,21 +290,29 @@ impl<'a> Vm<'a> {
}
ADDFI => binary_op_imm!(self, as_f64, ops::Add::add),
MULFI => binary_op_imm!(self, as_f64, ops::Mul::mul),
_ => return HaltReason::InvalidOpcode,
op => {
if !self.traph.invalid_op(
&mut self.registers,
&mut self.pc,
&mut self.memory,
op,
) {
return Err(VmRunError::InvalidOpcodeEx);
}
}
}
}
}
}
/// Read register
#[inline]
unsafe fn read_reg(&self, n: u8) -> Value {
if n == 0 {
0_u64.into()
} else {
*self.registers.get_unchecked(n as usize)
}
*self.registers.get_unchecked(n as usize)
}
/// Write a register.
/// Writing to register 0 is a no-op.
#[inline]
unsafe fn write_reg(&mut self, n: u8, value: Value) {
if n != 0 {
@ -297,12 +321,16 @@ impl<'a> Vm<'a> {
}
}
/// Virtual machine halt error
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum HaltReason {
ProgramEnd,
Ecall,
InvalidOpcode,
pub enum VmRunError {
/// Unhandled invalid opcode exception
InvalidOpcodeEx,
/// Unhandled load access exception
LoadAccessEx,
/// Unhandled store access exception
StoreAccessEx,
}
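
A minimal end-to-end sketch of the new API (not part of this diff; the helper run_program and its error reporting are made up), using the validated constructor instead of new_unchecked:

fn run_program(prog: &[u8], traph: impl hbvm::vm::trap::HandleTrap) {
    match hbvm::vm::Vm::new_validated(prog, traph) {
        Err(e) => eprintln!("validation error: {e:?}"),
        Ok(mut vm) => match vm.run() {
            Ok(()) => println!("{:?}", vm.registers),
            Err(e) => eprintln!("run error: {e:?}"),
        },
    }
}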

hbvm/src/vm/trap.rs (new file): 33 lines added
View file

@ -0,0 +1,33 @@
use super::{
mem::{Memory, MemoryAccessReason, PageSize},
value::Value,
};
/// Handle VM traps
pub trait HandleTrap {
/// Handle page fault
fn page_fault(
&mut self,
reason: MemoryAccessReason,
memory: &mut Memory,
vaddr: u64,
size: PageSize,
dataptr: *mut u8,
) -> bool;
/// Handle invalid opcode exception
fn invalid_op(
&mut self,
regs: &mut [Value; 256],
pc: &mut usize,
memory: &mut Memory,
op: u8,
) -> bool
where
Self: Sized;
/// Handle environment calls
fn ecall(&mut self, regs: &mut [Value; 256], pc: &mut usize, memory: &mut Memory)
where
Self: Sized;
}
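
For contrast with the TestTrapHandler in main.rs, a hedged sketch of a handler (not in this diff; the name LoggingTrapHandler is made up) that merely logs each trap via the log crate hbvm already depends on and declines to handle it:

use hbvm::vm::{
    mem::{Memory, MemoryAccessReason, PageSize},
    trap::HandleTrap,
    value::Value,
};

struct LoggingTrapHandler;

impl HandleTrap for LoggingTrapHandler {
    fn page_fault(
        &mut self,
        reason: MemoryAccessReason,
        _memory: &mut Memory,
        vaddr: u64,
        size: PageSize,
        _dataptr: *mut u8,
    ) -> bool {
        log::warn!("unhandled {:?} page fault at {:#x} (page size {:?})", reason, vaddr, size);
        false // declining here surfaces the fault as a VmRunError
    }

    fn invalid_op(&mut self, _: &mut [Value; 256], pc: &mut usize, _: &mut Memory, op: u8) -> bool {
        log::warn!("invalid opcode {:#04x} at pc {}", op, pc);
        false
    }

    fn ecall(&mut self, _: &mut [Value; 256], _: &mut usize, _: &mut Memory) {
        log::info!("ecall received and ignored");
    }
}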

View file

@ -1,7 +1,13 @@
use core::fmt::Debug;
/// Define [`Value`] union
///
/// # Safety
/// Union variants have to be sound to byte-reinterpret
/// between each other. Otherwise the behaviour is undefined.
macro_rules! value_def {
($($ty:ident),* $(,)?) => {
/// HBVM register value
#[derive(Copy, Clone)]
#[repr(packed)]
pub union Value {
@ -10,6 +16,7 @@ macro_rules! value_def {
paste::paste! {
impl Value {$(
#[doc = "Byte-reinterpret [`Value`] as [`" $ty "`]"]
#[inline]
pub fn [<as_ $ty>](&self) -> $ty {
unsafe { self.$ty }
@ -29,9 +36,11 @@ macro_rules! value_def {
}
value_def!(u64, i64, f64);
static_assertions::const_assert_eq!(core::mem::size_of::<Value>(), 8);
impl Debug for Value {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.as_u64().fmt(f)
// Print formatted as hexadecimal, unsigned integer
write!(f, "{:x}", self.as_u64())
}
}
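
A tiny sketch (not part of the diff) of the new Debug output, assuming the From<u64> conversion generated by value_def!:

fn main() {
    let v = hbvm::vm::value::Value::from(255_u64);
    // Debug now prints the unsigned interpretation in hexadecimal
    assert_eq!(format!("{v:?}"), "ff");
}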