pub use self::reg::{RET_ADDR, STACK_PTR, ZERO};
use {
crate::{
ident::{self, Ident},
instrs::{self, *},
lexer::TokenKind,
parser::{
self, find_symbol, idfl, CommentOr, CtorField, Expr, ExprRef, FileId, Pos, StructField,
},
ty::{self, TyCheck},
Field, Func, Global, LoggedMem, OffsetIter, ParamAlloc, Reloc, Sig, Struct, SymKey,
TypedReloc, Types, HEADER_SIZE,
},
alloc::{boxed::Box, string::String, vec::Vec},
core::fmt::Display,
};
type Offset = u32;
type Size = u32;
type ArrayLen = u32;
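/// Reads `size` bytes (at most 8) from `ptr` into the low bytes of a `u64`
/// (native endianness; effectively zero-extension on little-endian targets).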
fn load_value(ptr: *const u8, size: u32) -> u64 {
let mut dst = [0u8; 8];
dst[..size as usize]
.copy_from_slice(unsafe { core::slice::from_raw_parts(ptr, size as usize) });
u64::from_ne_bytes(dst)
}
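/// Returns the constant directly, or, when `derefed`, loads `size` bytes from
/// the address the constant holds.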
fn ensure_loaded(value: CtValue, derefed: bool, size: u32) -> u64 {
if derefed {
load_value(value.0 as *const u8, size)
} else {
value.0
}
}
mod stack {
use {
super::{Offset, Size},
alloc::vec::Vec,
core::num::NonZeroU32,
};
impl crate::Reloc {
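/// Packs a stack slot id (high 32 bits) and a byte offset (low 32 bits) into
/// one `u64`, so a relocation can carry both until the final stack layout is
/// known. Round-trip sketch:
/// ```ignore
/// let packed = Reloc::pack_srel(&id, 8);
/// assert_eq!(Reloc::unpack_srel(packed), (id.repr(), 8));
/// ```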
pub fn pack_srel(id: &Id, off: u32) -> u64 {
((id.repr() as u64) << 32) | (off as u64)
}
pub fn apply_stack_offset(&self, code: &mut [u8], stack: &Alloc) {
let bytes =
&code[self.offset as usize + self.sub_offset as usize..][..self.width as usize];
let (id, off) = Self::unpack_srel(u64::from_ne_bytes(bytes.try_into().unwrap()));
self.write_offset(code, stack.final_offset(id, off) as i64);
}
pub fn unpack_srel(id: u64) -> (u32, u32) {
((id >> 32) as u32, id as u32)
}
}
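/// Handle to a stack slot. The MSB of the wrapped value marks a non-owning
/// reference; the remaining bits hold the 1-based slot index.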
#[derive(Debug, PartialEq, Eq)]
pub struct Id(NonZeroU32);
impl Id {
fn index(&self) -> usize {
(self.0.get() as usize - 1) & !(1 << 31)
}
pub fn repr(&self) -> u32 {
self.0.get()
}
pub fn as_ref(&self) -> Self {
Self(unsafe { NonZeroU32::new_unchecked(self.0.get() | 1 << 31) })
}
pub fn is_ref(&self) -> bool {
self.0.get() & (1 << 31) != 0
}
}
impl Drop for Id {
fn drop(&mut self) {
let is_panicking = {
#[cfg(feature = "std")]
{
std::thread::panicking()
}
#[cfg(not(feature = "std"))]
{
false
}
};
if !is_panicking && !self.is_ref() {
unreachable!("stack id leaked: {:?}", self.0);
}
}
}
#[derive(PartialEq)]
struct Meta {
size: Size,
offset: Offset,
rc: u32,
}
#[derive(Default)]
pub struct Alloc {
height: Size,
pub max_height: Size,
meta: Vec<Meta>,
}
impl Alloc {
pub fn allocate(&mut self, size: Size) -> Id {
self.meta.push(Meta { size, offset: 0, rc: 1 });
self.height += size;
self.max_height = self.max_height.max(self.height);
Id(unsafe { NonZeroU32::new_unchecked(self.meta.len() as u32) })
}
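/// Currently a no-op apart from defusing the leak check in `Drop`; the
/// refcounting below is disabled and unfreed slots are laid out by
/// `finalize_leaked`.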
pub fn free(&mut self, id: Id) {
core::mem::forget(id);
//if id.is_ref() {}
//let meta = &mut self.meta[id.index()];
//meta.rc -= 1;
//if meta.rc != 0 {
// return;
//}
//meta.offset = self.height;
//self.height -= meta.size;
}
pub fn dup_id(&mut self, id: &Id) -> Id {
if id.is_ref() {
return id.as_ref();
}
self.meta[id.index()].rc += 1;
Id(id.0)
}
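/// Lays out every slot that is still alive (`rc > 0`), assigning offsets that
/// `final_offset` later rebases against `max_height` into stack-pointer
/// relative values.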
pub fn finalize_leaked(&mut self) {
for meta in self.meta.iter_mut().filter(|m| m.rc > 0) {
meta.offset = self.height;
self.height -= meta.size;
}
}
pub fn clear(&mut self) {
self.height = 0;
self.max_height = 0;
self.meta.clear();
}
pub fn final_offset(&self, id: u32, extra_offset: Offset) -> Offset {
debug_assert_ne!(id, 0);
(self.max_height - self.meta[(id as usize - 1) & !(1 << 31)].offset) + extra_offset
}
}
}
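/// Register allocation for the hbvm target: `r0` is hardwired zero, `r254`
/// serves as the stack pointer, and `r32..=r253` are handed out as
/// general-purpose registers.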
mod reg {
use alloc::vec::Vec;
pub const STACK_PTR: Reg = 254;
pub const ZERO: Reg = 0;
pub const RET: Reg = 1;
pub const RET_ADDR: Reg = 31;
type Reg = u8;
#[cfg(all(debug_assertions, feature = "std"))]
type Bt = std::backtrace::Backtrace;
#[cfg(not(all(debug_assertions, feature = "std")))]
type Bt = ();
#[derive(Default, Debug)]
pub struct Id(Reg, Option<Bt>);
impl PartialEq for Id {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
impl Eq for Id {}
impl Id {
pub const RET: Self = Id(RET, None);
pub fn get(&self) -> Reg {
self.0
}
pub fn as_ref(&self) -> Self {
Self(self.0, None)
}
pub fn is_ref(&self) -> bool {
self.1.is_none()
}
}
impl From<u8> for Id {
fn from(value: u8) -> Self {
Self(value, None)
}
}
#[cfg(all(debug_assertions, feature = "std"))]
impl Drop for Id {
fn drop(&mut self) {
if !std::thread::panicking()
&& let Some(bt) = self.1.take()
{
unreachable!("reg id leaked: {:?} {bt}", self.0);
}
}
}
#[derive(Default, PartialEq, Eq)]
pub struct Alloc {
free: Vec<Reg>,
max_used: Reg,
}
impl Alloc {
pub fn init(&mut self) {
self.free.clear();
self.free.extend((32..=253).rev());
self.max_used = RET_ADDR;
}
pub fn allocate(&mut self) -> Id {
let reg = self.free.pop().expect("TODO: we need to spill");
self.max_used = self.max_used.max(reg);
Id(
reg,
#[cfg(all(debug_assertions, feature = "std"))]
Some(std::backtrace::Backtrace::capture()),
#[cfg(not(all(debug_assertions, feature = "std")))]
Some(()),
)
}
pub fn free(&mut self, mut reg: Id) {
if reg.1.take().is_some() {
self.free.push(reg.0);
core::mem::forget(reg);
}
}
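/// Bytes the prologue must reserve to spill `RET_ADDR..=max_used`, at 8
/// bytes per register.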
pub fn pushed_size(&self) -> usize {
((self.max_used as usize).saturating_sub(RET_ADDR as usize) + 1) * 8
}
}
}
struct Value {
ty: ty::Id,
loc: Loc,
}
impl Value {
fn new(ty: impl Into<ty::Id>, loc: impl Into<Loc>) -> Self {
Self { ty: ty.into(), loc: loc.into() }
}
fn void() -> Self {
Self { ty: ty::Id::VOID, loc: Loc::ct(0) }
}
fn imm(value: u64) -> Self {
Self { ty: ty::Id::UINT, loc: Loc::ct(value) }
}
fn ty(ty: ty::Id) -> Self {
Self { ty: ty::Id::TYPE, loc: Loc::ct(ty.repr() as u64) }
}
}
enum LocCow<'a> {
Ref(&'a Loc),
Owned(Loc),
}
impl LocCow<'_> {
fn as_ref(&self) -> &Loc {
match self {
Self::Ref(value) => value,
Self::Owned(value) => value,
}
}
}
impl<'a> From<&'a Loc> for LocCow<'a> {
fn from(value: &'a Loc) -> Self {
Self::Ref(value)
}
}
impl From<Loc> for LocCow<'_> {
fn from(value: Loc) -> Self {
Self::Owned(value)
}
}
#[repr(packed)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct CtValue(u64);
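/// Where a value currently lives. `Rt` is a runtime location: a register,
/// optionally an owned stack slot, and an offset, with `derefed` meaning the
/// location holds a pointer to the value rather than the value itself. `Ct`
/// is a compile-time constant, with `derefed` meaning the constant is a
/// pointer to the actual bytes.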
#[derive(Debug, PartialEq, Eq)]
enum Loc {
Rt { derefed: bool, reg: reg::Id, stack: Option<stack::Id>, offset: Offset },
Ct { derefed: bool, value: CtValue },
}
impl Loc {
fn stack(stack: stack::Id) -> Self {
Self::Rt { stack: Some(stack), reg: reg::STACK_PTR.into(), derefed: true, offset: 0 }
}
fn reg(reg: impl Into<reg::Id>) -> Self {
let reg = reg.into();
assert!(reg.get() != 0);
Self::Rt { derefed: false, reg, stack: None, offset: 0 }
}
fn ct(value: u64) -> Self {
Self::Ct { value: CtValue(value), derefed: false }
}
fn ct_ptr(value: u64) -> Self {
Self::Ct { value: CtValue(value), derefed: true }
}
fn ty(ty: ty::Id) -> Self {
Self::ct(ty.repr() as _)
}
fn offset(mut self, offset: u32) -> Self {
match &mut self {
Self::Rt { offset: off, .. } => *off += offset,
Self::Ct { derefed: false, value } => value.0 += offset as u64,
_ => unreachable!("offseting constant"),
}
self
}
fn as_ref(&self) -> Self {
match *self {
Loc::Rt { derefed, ref reg, ref stack, offset } => Loc::Rt {
derefed,
reg: reg.as_ref(),
stack: stack.as_ref().map(stack::Id::as_ref),
offset,
},
Loc::Ct { value, derefed } => Self::Ct { derefed, value },
}
}
fn into_derefed(mut self) -> Self {
match &mut self {
Self::Rt { derefed, .. } => *derefed = true,
val => unreachable!("{val:?}"),
}
self
}
fn assert_valid(&self) {
assert!(!matches!(self, Self::Rt { reg, .. } if reg.get() == 0));
}
fn take_owned(&mut self) -> Option {
if self.is_ref() {
return None;
}
Some(core::mem::replace(self, self.as_ref()))
}
fn is_ref(&self) -> bool {
matches!(self, Self::Rt { reg, stack, .. } if reg.is_ref() && stack.as_ref().map_or(true, stack::Id::is_ref))
}
fn to_ty(&self) -> Option {
match *self {
Self::Ct { derefed: false, value } => Some(ty::Id::from(value.0)),
Self::Ct { derefed: true, value } => {
Some(unsafe { core::ptr::read(value.0 as *const u8 as _) })
}
Self::Rt { .. } => None,
}
}
fn is_stack(&self) -> bool {
matches!(self, Self::Rt { derefed: true, reg, stack: Some(_), offset: 0 } if reg.get() == STACK_PTR)
}
fn is_reg(&self) -> bool {
matches!(self, Self::Rt { derefed: false, reg: _, stack: None, offset: 0 })
}
}
impl From<reg::Id> for Loc {
fn from(reg: reg::Id) -> Self {
Loc::reg(reg)
}
}
impl Default for Loc {
fn default() -> Self {
Self::ct(0)
}
}
#[derive(Clone, Copy)]
struct Loop {
var_count: u32,
offset: u32,
reloc_base: u32,
}
struct Variable {
id: Ident,
value: Value,
}
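/// Lengths of the growable `ItemCtx` buffers, captured by `snap` and restored
/// by `revert` when speculatively generated code has to be thrown away.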
struct ItemCtxSnap {
stack_relocs: usize,
ret_relocs: usize,
loop_relocs: usize,
code: usize,
relocs: usize,
}
#[derive(Default)]
struct ItemCtx {
file: FileId,
id: ty::Kind,
ret: Option<ty::Id>,
ret_reg: reg::Id,
inline_ret_loc: Loc,
task_base: usize,
stack: stack::Alloc,
regs: reg::Alloc,
loops: Vec<Loop>,
vars: Vec<Variable>,
stack_relocs: Vec<Reloc>,
ret_relocs: Vec<Reloc>,
loop_relocs: Vec<Reloc>,
code: Vec<u8>,
relocs: Vec<TypedReloc>,
}
impl ItemCtx {
fn write_trap(&mut self, kind: trap::Trap) {
self.emit(eca());
self.code.push(255);
self.code.extend(kind.as_slice());
}
fn snap(&self) -> ItemCtxSnap {
ItemCtxSnap {
stack_relocs: self.stack_relocs.len(),
ret_relocs: self.ret_relocs.len(),
loop_relocs: self.loop_relocs.len(),
code: self.code.len(),
relocs: self.relocs.len(),
}
}
fn revert(&mut self, snap: ItemCtxSnap) {
self.stack_relocs.truncate(snap.stack_relocs);
self.ret_relocs.truncate(snap.ret_relocs);
self.loop_relocs.truncate(snap.loop_relocs);
self.code.truncate(snap.code);
self.relocs.truncate(snap.relocs);
}
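/// Emits `addi64 dest, op, delta`, folding `delta == 0` into a plain register
/// copy, or into nothing at all when `dest == op`.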
fn emit_addi(&mut self, dest: u8, op: u8, delta: u64) {
if delta == 0 {
if dest != op {
self.emit(cp(dest, op));
}
return;
}
self.emit(addi64(dest, op, delta));
}
fn emit(&mut self, (len, instr): (usize, [u8; instrs::MAX_SIZE])) {
self.code.extend_from_slice(&instr[..len]);
}
fn emit_prelude(&mut self) {
self.emit(instrs::addi64(STACK_PTR, STACK_PTR, 0));
self.emit(instrs::st(RET_ADDR, STACK_PTR, 0, 0));
}
fn emit_entry_prelude(&mut self) {
self.emit(jal(RET_ADDR, reg::ZERO, 0));
self.emit(tx());
}
pub fn dup_loc(&mut self, loc: &Loc) -> Loc {
match *loc {
Loc::Rt { derefed, ref reg, ref stack, offset } => Loc::Rt {
reg: reg.as_ref(),
derefed,
stack: stack.as_ref().map(|s| self.stack.dup_id(s)),
offset,
},
ref loc => loc.as_ref(),
}
}
fn finalize(&mut self) {
if let Some(last_ret) = self.ret_relocs.last()
&& last_ret.offset as usize == self.code.len() - 5
{
self.code.truncate(self.code.len() - 5);
self.ret_relocs.pop();
}
let len = self.code.len() as Offset;
self.stack.finalize_leaked();
for rel in self.stack_relocs.drain(..) {
rel.apply_stack_offset(&mut self.code, &self.stack)
}
for rel in self.ret_relocs.drain(..) {
let off = rel.apply_jump(&mut self.code, len, 0);
debug_assert!(off > 0);
}
let pushed = self.regs.pushed_size() as i64;
let stack = self.stack.max_height as i64;
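// Patch the placeholders emitted by `emit_prelude`: the `addi64` immediate
// (at byte 3) reserves `pushed + stack` bytes of frame (the immediate is
// negative, the stack grows down), and the `st` offset and count fields (at
// bytes 14 and 22) place the register save area above the stack data.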
write_reloc(&mut self.code, 3, -(pushed + stack), 8);
write_reloc(&mut self.code, 3 + 8 + 3, stack, 8);
write_reloc(&mut self.code, 3 + 8 + 3 + 8, pushed, 2);
self.emit(instrs::ld(reg::RET_ADDR, reg::STACK_PTR, stack as _, pushed as _));
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, (pushed + stack) as _));
self.stack.clear();
debug_assert!(self.loops.is_empty());
debug_assert!(self.loop_relocs.is_empty());
debug_assert!(self.vars.is_empty());
}
fn free_loc(&mut self, src: impl Into) {
if let LocCow::Owned(Loc::Rt { reg, stack, .. }) = src.into() {
self.regs.free(reg);
if let Some(stack) = stack {
self.stack.free(stack);
}
}
}
}
fn write_reloc(code: &mut [u8], offset: usize, value: i64, size: u16) {
let value = value.to_ne_bytes();
code[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}
mod task {
use super::Offset;
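/// Offsets with the MSB set encode indices of not-yet-compiled items:
/// `unpack` yields `Err(task_index)` for those and `Ok(offset)` for
/// finished code.
/// ```ignore
/// assert_eq!(unpack(id(3)), Err(3));
/// ```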
pub fn unpack(offset: Offset) -> Result<Offset, usize> {
if offset >> 31 != 0 {
Err((offset & !(1 << 31)) as usize)
} else {
Ok(offset)
}
}
pub fn id(index: usize) -> Offset {
1 << 31 | index as u32
}
}
#[derive(Debug)]
struct FTask {
file: FileId,
id: ty::Func,
}
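/// Hints threaded through expression lowering: an optional destination
/// location, an optional expected type, and the type-check mode.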
#[derive(Default, Debug)]
struct Ctx {
loc: Option<Loc>,
ty: Option<ty::Id>,
check: TyCheck,
}
impl Ctx {
pub fn with_loc(self, loc: Loc) -> Self {
Self { loc: Some(loc), ..self }
}
pub fn with_ty(self, ty: impl Into<ty::Id>) -> Self {
Self { ty: Some(ty.into()), ..self }
}
pub fn with_check(self, check: TyCheck) -> Self {
Self { check, ..self }
}
fn into_value(self) -> Option<Value> {
Some(Value { ty: self.ty.unwrap(), loc: self.loc? })
}
}
impl From<Value> for Ctx {
fn from(value: Value) -> Self {
Self { loc: Some(value.loc), ty: Some(value.ty), ..Default::default() }
}
}
#[derive(Default)]
struct Pool {
cis: Vec<ItemCtx>,
arg_locs: Vec<Loc>,
}
const VM_STACK_SIZE: usize = 1024 * 64;
pub struct Comptime {
pub vm: hbvm::Vm<LoggedMem, { 1024 * 10 }>,
stack: Box<[u8; VM_STACK_SIZE]>,
code: Vec<u8>,
}
impl Comptime {
fn reset(&mut self) {
let ptr = unsafe { self.stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
self.vm.registers.fill(hbvm::value::Value(0));
self.vm.write_reg(STACK_PTR, ptr);
self.vm.pc = hbvm::mem::Address::new(self.code.as_ptr() as u64 + HEADER_SIZE as u64);
}
}
impl Default for Comptime {
fn default() -> Self {
let mut stack = Box::<[u8; VM_STACK_SIZE]>::new_uninit();
let mut vm = hbvm::Vm::default();
let ptr = unsafe { stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
vm.write_reg(STACK_PTR, ptr);
Self { vm, stack: unsafe { stack.assume_init() }, code: Default::default() }
}
}
mod trap {
use {
super::ty,
crate::parser::{ExprRef, FileId},
};
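/// Expands to a `#[repr(u8)]` enum plus one `#[repr(packed)]` payload struct
/// per variant, so a trap and its payload can be copied byte-for-byte into
/// the instruction stream (see `Trap::as_slice`).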
macro_rules! gen_trap {
(
#[derive(Trap)]
$vis:vis enum $name:ident {
$($variant:ident {
$($fname:ident: $fty:ty,)*
},)*
}
) => {
#[repr(u8)]
$vis enum $name {
$($variant($variant),)*
}
impl $name {
$vis fn size(&self) -> usize {
1 + match self {
$(Self::$variant(_) => core::mem::size_of::<$variant>(),)*
}
}
}
$(
#[repr(packed)]
$vis struct $variant {
$($vis $fname: $fty,)*
}
)*
};
}
gen_trap! {
#[derive(Trap)]
pub enum Trap {
MakeStruct {
file: FileId,
struct_expr: ExprRef,
},
MomizedCall {
func: ty::Func,
},
}
}
impl Trap {
pub fn as_slice(&self) -> &[u8] {
unsafe { core::slice::from_raw_parts(self as *const _ as _, self.size()) }
}
}
}
#[derive(Default)]
pub struct Codegen {
pub files: Vec<parser::Ast>,
pub embeds: Vec<Vec<u8>>,
tasks: Vec<FTask>,