From fbdabd83144a058c5025ff5e8fa2e9cd93e8ba3a Mon Sep 17 00:00:00 2001 From: mlokr Date: Fri, 13 Sep 2024 18:22:27 +0200 Subject: [PATCH] binary no longer contains comptime code and inoptimized impl is grately simplified --- hblang/src/codegen.rs | 1404 +++++++++-------------------------------- hblang/src/lib.rs | 294 ++++++--- hblang/src/parser.rs | 3 +- hblang/src/son.rs | 114 +--- 4 files changed, 523 insertions(+), 1292 deletions(-) diff --git a/hblang/src/codegen.rs b/hblang/src/codegen.rs index a2adb21..795f307 100644 --- a/hblang/src/codegen.rs +++ b/hblang/src/codegen.rs @@ -6,9 +6,10 @@ use { lexer::TokenKind, log, parser::{self, find_symbol, idfl, CtorField, Expr, ExprRef, FileId, Pos}, - HashMap, LoggedMem, + ty, Field, Func, Global, LoggedMem, ParamAlloc, Reloc, Sig, Struct, SymKey, TypedReloc, + Types, }, - std::{collections::BTreeMap, fmt::Display, ops::Range, rc::Rc}, + std::fmt::Display, }; type Offset = u32; @@ -35,6 +36,23 @@ mod stack { std::num::NonZeroU32, }; + impl crate::Reloc { + pub fn pack_srel(id: &Id, off: u32) -> u64 { + ((id.repr() as u64) << 32) | (off as u64) + } + + pub fn apply_stack_offset(&self, code: &mut [u8], stack: &Alloc) { + let bytes = + &code[self.offset as usize + self.sub_offset as usize..][..self.width as usize]; + let (id, off) = Self::unpack_srel(u64::from_ne_bytes(bytes.try_into().unwrap())); + self.write_offset(code, stack.final_offset(id, off) as i64); + } + + pub fn unpack_srel(id: u64) -> (u32, u32) { + ((id >> 32) as u32, id as u32) + } + } + #[derive(Debug, PartialEq, Eq)] pub struct Id(NonZeroU32); @@ -139,35 +157,51 @@ mod reg { type Reg = u8; - #[derive(Default, Debug, PartialEq, Eq)] - pub struct Id(Reg, bool); + #[cfg(debug_assertions)] + type Bt = std::backtrace::Backtrace; + #[cfg(not(debug_assertions))] + type Bt = (); + + #[derive(Default, Debug)] + pub struct Id(Reg, Option); + + impl PartialEq for Id { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } + } + + impl Eq for Id {} impl Id { - pub const RET: Self = Id(RET, false); + pub const RET: Self = Id(RET, None); pub fn get(&self) -> Reg { self.0 } pub fn as_ref(&self) -> Self { - Self(self.0, false) + Self(self.0, None) } pub fn is_ref(&self) -> bool { - !self.1 + self.1.is_none() } } impl From for Id { fn from(value: u8) -> Self { - Self(value, false) + Self(value, None) } } + #[cfg(debug_assertions)] impl Drop for Id { fn drop(&mut self) { - if !std::thread::panicking() && self.1 { - unreachable!("reg id leaked: {:?}", self.0); + if !std::thread::panicking() + && let Some(bt) = self.1.take() + { + unreachable!("reg id leaked: {:?} {bt}", self.0); } } } @@ -188,11 +222,17 @@ mod reg { pub fn allocate(&mut self) -> Id { let reg = self.free.pop().expect("TODO: we need to spill"); self.max_used = self.max_used.max(reg); - Id(reg, true) + Id( + reg, + #[cfg(debug_assertions)] + Some(std::backtrace::Backtrace::capture()), + #[cfg(not(debug_assertions))] + Some(()), + ) } pub fn free(&mut self, reg: Id) { - if reg.1 { + if reg.1.is_some() { self.free.push(reg.0); std::mem::forget(reg); } @@ -204,496 +244,6 @@ mod reg { } } -pub mod ty { - use { - crate::{ - codegen::ArrayLen, - lexer::TokenKind, - parser::{self, Expr}, - }, - std::{num::NonZeroU32, ops::Range}, - }; - - pub type Builtin = u32; - pub type Struct = u32; - pub type Ptr = u32; - pub type Func = u32; - pub type Global = u32; - pub type Module = u32; - pub type Param = u32; - pub type Slice = u32; - - #[derive(Clone, Copy)] - pub struct Tuple(pub u32); - - impl Tuple { - const LEN_BITS: u32 = 5; - const 
LEN_MASK: usize = Self::MAX_LEN - 1; - const MAX_LEN: usize = 1 << Self::LEN_BITS; - - pub fn new(pos: usize, len: usize) -> Option { - if len >= Self::MAX_LEN { - return None; - } - - Some(Self((pos << Self::LEN_BITS | len) as u32)) - } - - pub fn view(self, slice: &[Id]) -> &[Id] { - &slice[self.0 as usize >> Self::LEN_BITS..][..self.len()] - } - - pub fn range(self) -> Range { - let start = self.0 as usize >> Self::LEN_BITS; - start..start + self.len() - } - - pub fn len(self) -> usize { - self.0 as usize & Self::LEN_MASK - } - - pub fn is_empty(self) -> bool { - self.0 == 0 - } - - pub fn empty() -> Self { - Self(0) - } - - pub fn repr(&self) -> u32 { - self.0 - } - } - - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] - pub struct Id(NonZeroU32); - - impl Default for Id { - fn default() -> Self { - Self(unsafe { NonZeroU32::new_unchecked(UNDECLARED) }) - } - } - - impl Id { - pub const fn from_bt(bt: u32) -> Self { - Self(unsafe { NonZeroU32::new_unchecked(bt) }) - } - - pub fn is_signed(self) -> bool { - (I8..=INT).contains(&self.repr()) - } - - pub fn is_unsigned(self) -> bool { - (U8..=UINT).contains(&self.repr()) - } - - pub fn is_integer(self) -> bool { - (U8..=INT).contains(&self.repr()) - } - - pub fn strip_pointer(self) -> Self { - match self.expand() { - Kind::Ptr(_) => Kind::Builtin(UINT).compress(), - _ => self, - } - } - - pub fn is_pointer(self) -> bool { - matches!(Kind::from_ty(self), Kind::Ptr(_)) - } - - pub fn try_upcast(self, ob: Self) -> Option { - let (oa, ob) = (Self(self.0.min(ob.0)), Self(self.0.max(ob.0))); - let (a, b) = (oa.strip_pointer(), ob.strip_pointer()); - Some(match () { - _ if oa == ob => oa, - _ if oa.is_pointer() && ob.is_pointer() => return None, - _ if a.is_signed() && b.is_signed() || a.is_unsigned() && b.is_unsigned() => ob, - _ if a.is_unsigned() && b.is_signed() && a.repr() - U8 < b.repr() - I8 => ob, - _ if oa.is_integer() && ob.is_pointer() => ob, - _ => return None, - }) - } - - pub fn expand(self) -> Kind { - Kind::from_ty(self) - } - - pub const fn repr(self) -> u32 { - self.0.get() - } - } - - impl From for Id { - fn from(id: u64) -> Self { - Self(unsafe { NonZeroU32::new_unchecked(id as _) }) - } - } - - impl From for Id { - fn from(id: u32) -> Self { - Kind::Builtin(id).compress() - } - } - - const fn array_to_lower_case(array: [u8; N]) -> [u8; N] { - let mut result = [0; N]; - let mut i = 0; - while i < N { - result[i] = array[i].to_ascii_lowercase(); - i += 1; - } - result - } - // const string to lower case - - macro_rules! builtin_type { - ($($name:ident;)*) => { - $(pub const $name: Builtin = ${index(0)} + 1;)* - - mod __lc_names { - use super::*; - $(pub const $name: &[u8] = &array_to_lower_case(unsafe { - *(stringify!($name).as_ptr() as *const [u8; stringify!($name).len()]) });)* - } - - pub fn from_str(name: &str) -> Option { - match name.as_bytes() { - $(__lc_names::$name => Some($name),)* - _ => None, - } - } - - pub fn to_str(ty: Builtin) -> &'static str { - match ty { - $($name => unsafe { std::str::from_utf8_unchecked(__lc_names::$name) },)* - v => unreachable!("invalid type: {}", v), - } - } - }; - } - - builtin_type! { - UNDECLARED; - NEVER; - VOID; - TYPE; - BOOL; - U8; - U16; - U32; - UINT; - I8; - I16; - I32; - INT; - } - - macro_rules! 
type_kind { - ($(#[$meta:meta])* $vis:vis enum $name:ident {$( $variant:ident, )*}) => { - $(#[$meta])* - $vis enum $name { - $($variant($variant),)* - } - - impl $name { - const FLAG_BITS: u32 = (${count($variant)} as u32).next_power_of_two().ilog2(); - const FLAG_OFFSET: u32 = std::mem::size_of::() as u32 * 8 - Self::FLAG_BITS; - const INDEX_MASK: u32 = (1 << (32 - Self::FLAG_BITS)) - 1; - - $vis fn from_ty(ty: Id) -> Self { - let (flag, index) = (ty.repr() >> Self::FLAG_OFFSET, ty.repr() & Self::INDEX_MASK); - match flag { - $(${index(0)} => Self::$variant(index),)* - i => unreachable!("{i}"), - } - } - - $vis const fn compress(self) -> Id { - let (index, flag) = match self { - $(Self::$variant(index) => (index, ${index(0)}),)* - }; - Id(unsafe { NonZeroU32::new_unchecked((flag << Self::FLAG_OFFSET) | index) }) - } - - $vis const fn inner(self) -> u32 { - match self { - $(Self::$variant(index) => index,)* - } - } - } - }; - } - - type_kind! { - #[derive(Debug, Clone, Copy, PartialEq, Eq)] - pub enum Kind { - Builtin, - Struct, - Ptr, - Func, - Global, - Module, - Slice, - } - } - - impl Default for Kind { - fn default() -> Self { - Self::Builtin(UNDECLARED) - } - } - - pub struct Display<'a> { - tys: &'a super::Types, - files: &'a [parser::Ast], - ty: Id, - } - - impl<'a> Display<'a> { - pub(super) fn new(tys: &'a super::Types, files: &'a [parser::Ast], ty: Id) -> Self { - Self { tys, files, ty } - } - - fn rety(&self, ty: Id) -> Self { - Self::new(self.tys, self.files, ty) - } - } - - impl<'a> std::fmt::Display for Display<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use Kind as TK; - match TK::from_ty(self.ty) { - TK::Module(idx) => write!(f, "module{}", idx), - TK::Builtin(ty) => write!(f, "{}", to_str(ty)), - TK::Ptr(ty) => { - write!(f, "^{}", self.rety(self.tys.ptrs[ty as usize].base)) - } - _ if let Some((key, _)) = self - .tys - .syms - .iter() - .find(|(sym, &ty)| sym.file < self.files.len() as u32 && ty == self.ty) - && let Some(name) = self.files[key.file as usize].exprs().iter().find_map( - |expr| match expr { - Expr::BinOp { - left: &Expr::Ident { name, id, .. }, - op: TokenKind::Decl, - .. 
- } if id == key.ident => Some(name), - _ => None, - }, - ) => - { - write!(f, "{name}") - } - TK::Struct(idx) => { - let record = &self.tys.structs[idx as usize]; - write!(f, "{{")?; - for (i, &super::Field { ref name, ty }) in record.fields.iter().enumerate() { - if i != 0 { - write!(f, ", ")?; - } - write!(f, "{name}: {}", self.rety(ty))?; - } - write!(f, "}}") - } - TK::Func(idx) => write!(f, "fn{idx}"), - TK::Global(idx) => write!(f, "global{idx}"), - TK::Slice(idx) => { - let array = self.tys.arrays[idx as usize]; - match array.len { - ArrayLen::MAX => write!(f, "[{}]", self.rety(array.ty)), - len => write!(f, "[{}; {len}]", self.rety(array.ty)), - } - } - } - } - } -} - -#[derive(Default)] -struct Types { - syms: HashMap, - - funcs: Vec, - args: Vec, - globals: Vec, - structs: Vec, - ptrs: Vec, - arrays: Vec, -} - -impl Types { - pub fn offset_of_item(&self, item: ty::Kind, ct_hint: Option) -> Option { - task::unpack(match item { - ty::Kind::Func(f) => self.funcs[f as usize].offset, - ty::Kind::Global(g) => self.globals[g as usize].offset, - ty::Kind::Builtin(u32::MAX) => ct_hint?, - _ => unreachable!(), - }) - .ok() - } - - pub fn is_runtime_item(&self, item: ty::Kind) -> bool { - match item { - ty::Kind::Func(f) => self.funcs[f as usize].runtime, - ty::Kind::Global(f) => self.globals[f as usize].runtime, - ty::Kind::Builtin(u32::MAX) => false, - _ => unreachable!(), - } - } - - fn parama(&self, ret: impl Into) -> ParamAlloc { - ParamAlloc(2 + (9..=16).contains(&self.size_of(ret.into())) as u8..12) - } - - fn offset_of(&self, idx: ty::Struct, field: &str) -> Option<(Offset, ty::Id)> { - let record = &self.structs[idx as usize]; - let until = record.fields.iter().position(|f| f.name.as_ref() == field)?; - let mut offset = 0; - for &Field { ty, .. 
} in &record.fields[..until] { - offset = Self::align_up(offset, self.align_of(ty)); - offset += self.size_of(ty); - } - Some((offset, record.fields[until].ty)) - } - - fn make_ptr(&mut self, base: ty::Id) -> ty::Id { - ty::Kind::Ptr(self.make_ptr_low(base)).compress() - } - - fn make_ptr_low(&mut self, base: ty::Id) -> ty::Ptr { - let id = SymKey::pointer_to(base); - - self.syms - .entry(id) - .or_insert_with(|| { - self.ptrs.push(Ptr { base }); - ty::Kind::Ptr(self.ptrs.len() as u32 - 1).compress() - }) - .expand() - .inner() - } - - fn make_array(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Id { - ty::Kind::Slice(self.make_array_low(ty, len)).compress() - } - - fn make_array_low(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Slice { - let id = SymKey { - file: match len { - ArrayLen::MAX => ArrayLen::MAX - 1, - len => ArrayLen::MAX - len - 2, - }, - ident: ty.repr(), - }; - - self.syms - .entry(id) - .or_insert_with(|| { - self.arrays.push(Array { ty, len }); - ty::Kind::Slice(self.arrays.len() as u32 - 1).compress() - }) - .expand() - .inner() - } - - fn align_up(value: Size, align: Size) -> Size { - (value + align - 1) & !(align - 1) - } - - fn size_of(&self, ty: ty::Id) -> Size { - match ty.expand() { - ty::Kind::Ptr(_) => 8, - ty::Kind::Builtin(ty::VOID) => 0, - ty::Kind::Builtin(ty::NEVER) => unreachable!(), - ty::Kind::Builtin(ty::INT | ty::UINT) => 8, - ty::Kind::Builtin(ty::I32 | ty::U32 | ty::TYPE) => 4, - ty::Kind::Builtin(ty::I16 | ty::U16) => 2, - ty::Kind::Builtin(ty::I8 | ty::U8 | ty::BOOL) => 1, - ty::Kind::Slice(arr) => { - let arr = &self.arrays[arr as usize]; - match arr.len { - 0 => 0, - ArrayLen::MAX => 16, - len => self.size_of(arr.ty) * len, - } - } - ty::Kind::Struct(stru) => { - let mut offset = 0u32; - let record = &self.structs[stru as usize]; - for &Field { ty, .. } in record.fields.iter() { - let align = self.align_of(ty); - offset = Self::align_up(offset, align); - offset += self.size_of(ty); - } - offset - } - ty => unimplemented!("size_of: {:?}", ty), - } - } - - fn align_of(&self, ty: ty::Id) -> Size { - match ty.expand() { - ty::Kind::Struct(stru) => self.structs[stru as usize] - .fields - .iter() - .map(|&Field { ty, .. 
}| self.align_of(ty)) - .max() - .unwrap(), - ty::Kind::Slice(arr) => { - let arr = &self.arrays[arr as usize]; - match arr.len { - ArrayLen::MAX => 8, - _ => self.align_of(arr.ty), - } - } - _ => self.size_of(ty).max(1), - } - } -} - -#[derive(Clone, Copy, Debug)] -struct Reloc { - offset: Offset, - sub_offset: u8, - width: u8, -} - -impl Reloc { - fn new(offset: u32, sub_offset: u8, width: u8) -> Self { - Self { offset, sub_offset, width } - } - - fn apply_stack_offset(&self, code: &mut [u8], stack: &stack::Alloc) { - let bytes = &code[self.offset as usize + self.sub_offset as usize..][..self.width as usize]; - let (id, off) = Self::unpack_srel(u64::from_ne_bytes(bytes.try_into().unwrap())); - self.write_offset(code, stack.final_offset(id, off) as i64); - } - - fn pack_srel(id: &stack::Id, off: u32) -> u64 { - ((id.repr() as u64) << 32) | (off as u64) - } - - fn unpack_srel(id: u64) -> (u32, u32) { - ((id >> 32) as u32, id as u32) - } - - fn apply_jump(mut self, code: &mut [u8], to: u32, from: u32) -> i64 { - self.offset += from; - let offset = to as i64 - self.offset as i64; - self.write_offset(code, offset); - offset - } - - fn write_offset(&self, code: &mut [u8], offset: i64) { - let bytes = offset.to_ne_bytes(); - let slice = &mut code[self.offset as usize + self.sub_offset as usize..]; - slice[..self.width as usize].copy_from_slice(&bytes[..self.width as usize]); - } -} - struct Value { ty: ty::Id, loc: Loc, @@ -865,6 +415,14 @@ struct Variable { value: Value, } +struct ItemCtxSnap { + stack_relocs: usize, + ret_relocs: usize, + loop_relocs: usize, + code: usize, + relocs: usize, +} + #[derive(Default)] struct ItemCtx { file: FileId, @@ -874,218 +432,45 @@ struct ItemCtx { inline_ret_loc: Loc, task_base: usize, - snap: Snapshot, stack: stack::Alloc, regs: reg::Alloc, + loops: Vec, + vars: Vec, + stack_relocs: Vec, ret_relocs: Vec, loop_relocs: Vec, - loops: Vec, - vars: Vec, + code: Vec, + relocs: Vec, } impl ItemCtx { - pub fn dup_loc(&mut self, loc: &Loc) -> Loc { - match *loc { - Loc::Rt { derefed, ref reg, ref stack, offset } => Loc::Rt { - reg: reg.as_ref(), - derefed, - stack: stack.as_ref().map(|s| self.stack.dup_id(s)), - offset, - }, - ref loc => loc.as_ref(), + fn write_trap(&mut self, kind: trap::Trap) { + self.emit(eca()); + self.code.push(255); + self.code.extend(kind.as_slice()); + } + + fn snap(&self) -> ItemCtxSnap { + ItemCtxSnap { + stack_relocs: self.stack_relocs.len(), + ret_relocs: self.ret_relocs.len(), + loop_relocs: self.loop_relocs.len(), + code: self.code.len(), + relocs: self.relocs.len(), } } - fn finalize(&mut self, output: &mut Output) { - let base = self.snap.code as Offset; - - if let Some(last_ret) = self.ret_relocs.last() - && (last_ret.offset + base) as usize == output.code.len() - 5 - { - output.code.truncate(output.code.len() - 5); - self.ret_relocs.pop(); - } - - let len = output.code.len() as Offset; - - self.stack.finalize_leaked(); - for rel in self.stack_relocs.drain(..) { - rel.apply_stack_offset(&mut output.code[base as usize..], &self.stack) - } - - for rel in self.ret_relocs.drain(..) 
{ - let off = rel.apply_jump(&mut output.code, len, base); - debug_assert!(off > 0); - } - - self.finalize_frame(output); - self.stack.clear(); - - debug_assert!(self.loops.is_empty()); - debug_assert!(self.loop_relocs.is_empty()); - debug_assert!(self.vars.is_empty()); + fn revert(&mut self, snap: ItemCtxSnap) { + self.stack_relocs.truncate(snap.stack_relocs); + self.ret_relocs.truncate(snap.ret_relocs); + self.loop_relocs.truncate(snap.loop_relocs); + self.code.truncate(snap.code); + self.relocs.truncate(snap.relocs); } - fn finalize_frame(&mut self, output: &mut Output) { - let mut cursor = self.snap.code; - let mut allocate = |size| (cursor += size, cursor).1; - - let pushed = self.regs.pushed_size() as i64; - let stack = self.stack.max_height as i64; - - let mut exmpl = Output::default(); - exmpl.emit_prelude(); - - debug_assert_eq!(exmpl.code.as_slice(), &output.code[self.snap.code..][..exmpl.code.len()],); - - write_reloc(&mut output.code, allocate(3), -(pushed + stack), 8); - write_reloc(&mut output.code, allocate(8 + 3), stack, 8); - write_reloc(&mut output.code, allocate(8), pushed, 2); - - output.emit(ld(RET_ADDR, STACK_PTR, stack as _, pushed as _)); - output.emit(addi64(STACK_PTR, STACK_PTR, (pushed + stack) as _)); - } - - fn free_loc(&mut self, src: impl Into) { - if let LocCow::Owned(Loc::Rt { reg, stack, .. }) = src.into() { - self.regs.free(reg); - if let Some(stack) = stack { - self.stack.free(stack); - } - } - } -} - -fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) { - let value = value.to_ne_bytes(); - doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]); -} - -#[derive(PartialEq, Eq, Hash)] -struct SymKey { - file: u32, - ident: u32, -} - -impl SymKey { - pub fn pointer_to(ty: ty::Id) -> Self { - Self { file: u32::MAX, ident: ty.repr() } - } -} - -#[derive(Clone, Copy)] -struct Sig { - args: ty::Tuple, - ret: ty::Id, -} - -#[derive(Clone, Copy)] -struct Func { - file: FileId, - expr: ExprRef, - sig: Option, - runtime: bool, - offset: Offset, - size: Size, -} - -struct Global { - offset: Offset, - ty: ty::Id, - runtime: bool, - _file: FileId, - _name: Ident, -} - -struct Field { - name: Rc, - ty: ty::Id, -} - -struct Struct { - fields: Rc<[Field]>, -} - -struct Ptr { - base: ty::Id, -} - -struct ParamAlloc(Range); - -impl ParamAlloc { - pub fn next(&mut self) -> u8 { - self.0.next().expect("too many paramteters") - } - - fn next_wide(&mut self) -> u8 { - (self.next(), self.next()).0 - } -} - -#[derive(Clone, Copy)] -struct Array { - ty: ty::Id, - len: ArrayLen, -} - -mod task { - use super::Offset; - - pub fn unpack(offset: Offset) -> Result { - if offset >> 31 != 0 { - Err((offset & !(1 << 31)) as usize) - } else { - Ok(offset) - } - } - - pub fn id(index: usize) -> Offset { - 1 << 31 | index as u32 - } -} - -#[derive(Debug)] -struct FTask { - file: FileId, - id: ty::Func, -} - -#[derive(Default, Clone, Copy, PartialEq, Eq, Debug)] -pub struct Snapshot { - code: usize, - relocs: usize, -} - -impl Snapshot { - fn _sub(&mut self, other: &Self) { - self.code -= other.code; - self.relocs -= other.relocs; - } - - fn _add(&mut self, other: &Self) { - self.code += other.code; - self.relocs += other.relocs; - } -} - -struct OutReloc { - from: ty::Kind, - to: ty::Kind, - rel: Reloc, -} - -#[derive(Default)] -struct Output { - code: Vec, - string_data: Vec, - relocs: Vec, - strings: Vec, -} - -impl Output { fn emit_addi(&mut self, dest: u8, op: u8, delta: u64) { if delta == 0 { if dest != op { @@ -1118,29 +503,90 @@ impl Output { 
self.emit(tx()); } - fn append(&mut self, val: &mut Self) { - val.pop(self, &Snapshot::default()); + pub fn dup_loc(&mut self, loc: &Loc) -> Loc { + match *loc { + Loc::Rt { derefed, ref reg, ref stack, offset } => Loc::Rt { + reg: reg.as_ref(), + derefed, + stack: stack.as_ref().map(|s| self.stack.dup_id(s)), + offset, + }, + ref loc => loc.as_ref(), + } } - fn pop(&mut self, stash: &mut Self, snap: &Snapshot) { - stash.code.extend(self.code.drain(snap.code..)); - stash.relocs.extend(self.relocs.drain(snap.relocs..)); + fn finalize(&mut self) { + if let Some(last_ret) = self.ret_relocs.last() + && last_ret.offset as usize == self.code.len() - 5 + { + self.code.truncate(self.code.len() - 5); + self.ret_relocs.pop(); + } + + let len = self.code.len() as Offset; + + self.stack.finalize_leaked(); + for rel in self.stack_relocs.drain(..) { + rel.apply_stack_offset(&mut self.code, &self.stack) + } + + for rel in self.ret_relocs.drain(..) { + let off = rel.apply_jump(&mut self.code, len, 0); + debug_assert!(off > 0); + } + + let pushed = self.regs.pushed_size() as i64; + let stack = self.stack.max_height as i64; + + write_reloc(&mut self.code, 3, -(pushed + stack), 8); + write_reloc(&mut self.code, 3 + 8 + 3, stack, 8); + write_reloc(&mut self.code, 3 + 8 + 3 + 8, pushed, 2); + + self.emit(instrs::ld(reg::RET_ADDR, reg::STACK_PTR, stack as _, pushed as _)); + self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, (pushed + stack) as _)); + + self.stack.clear(); + + debug_assert!(self.loops.is_empty()); + debug_assert!(self.loop_relocs.is_empty()); + debug_assert!(self.vars.is_empty()); } - fn trunc(&mut self, snap: &Snapshot) { - self.code.truncate(snap.code); - self.relocs.truncate(snap.relocs); + fn free_loc(&mut self, src: impl Into) { + if let LocCow::Owned(Loc::Rt { reg, stack, .. 
}) = src.into() { + self.regs.free(reg); + if let Some(stack) = stack { + self.stack.free(stack); + } + } + } +} + +fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) { + let value = value.to_ne_bytes(); + doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]); +} + +mod task { + use super::Offset; + + pub fn unpack(offset: Offset) -> Result { + if offset >> 31 != 0 { + Err((offset & !(1 << 31)) as usize) + } else { + Ok(offset) + } } - fn write_trap(&mut self, kind: trap::Trap) { - self.emit(eca()); - self.code.push(255); - self.code.extend(kind.as_slice()); + pub fn id(index: usize) -> Offset { + 1 << 31 | index as u32 } +} - fn snap(&mut self) -> Snapshot { - Snapshot { code: self.code.len(), relocs: self.relocs.len() } - } +#[derive(Debug)] +struct FTask { + file: FileId, + id: ty::Func, } #[derive(Default, Debug)] @@ -1172,7 +618,6 @@ impl From for Ctx { #[derive(Default)] struct Pool { cis: Vec, - outputs: Vec, arg_locs: Vec, } @@ -1180,8 +625,8 @@ const VM_STACK_SIZE: usize = 1024 * 1024 * 2; struct Comptime { vm: hbvm::Vm, - depth: usize, _stack: Box<[u8; VM_STACK_SIZE]>, + code: Vec, } impl Default for Comptime { @@ -1190,21 +635,7 @@ impl Default for Comptime { let mut vm = hbvm::Vm::default(); let ptr = unsafe { stack.as_mut_ptr().cast::().add(VM_STACK_SIZE) as u64 }; vm.write_reg(STACK_PTR, ptr); - Self { vm, depth: 0, _stack: unsafe { stack.assume_init() } } - } -} - -impl Comptime { - fn active(&self) -> bool { - self.depth != 0 - } - - fn enter(&mut self) { - self.depth += 1; - } - - fn exit(&mut self) { - self.depth -= 1; + Self { vm, _stack: unsafe { stack.assume_init() }, code: Default::default() } } } @@ -1265,14 +696,6 @@ mod trap { } } -struct StringReloc { - // TODO: change to ty::Id - from: ty::Kind, - reloc: Reloc, - range: std::ops::Range, - shifted: bool, -} - #[derive(Default)] pub struct Codegen { pub files: Vec, @@ -1280,24 +703,16 @@ pub struct Codegen { tys: Types, ci: ItemCtx, - output: Output, pool: Pool, ct: Comptime, } impl Codegen { pub fn generate(&mut self) { - self.output.emit_entry_prelude(); + self.ci.emit_entry_prelude(); self.find_or_declare(0, 0, Err("main"), ""); self.make_func_reachable(0); - self.complete_call_graph_low(); - self.link(); - } - - pub fn dump(&mut self, out: &mut impl std::io::Write) -> std::io::Result<()> { - Reloc::new(0, 3, 4).apply_jump(&mut self.output.code, self.tys.funcs[0].offset, 0); - self.link(); - out.write_all(&self.output.code) + self.complete_call_graph(); } fn expr(&mut self, expr: &Expr) -> Option { @@ -1399,9 +814,9 @@ impl Codegen { let idx = self.loc_to_reg(index_val.loc, 8); if item_size != 1 { - self.output.emit(muli64(idx.get(), idx.get(), item_size as _)); + self.ci.emit(muli64(idx.get(), idx.get(), item_size as _)); } - self.output.emit(add64(reg.get(), reg.get(), idx.get())); + self.ci.emit(add64(reg.get(), reg.get(), idx.get())); self.ci.regs.free(idx); Some(Value::new(ty, base_val.loc)) @@ -1424,7 +839,7 @@ impl Codegen { self.report(func_ast.pos(), "first argument of inline needs to be a function"); }; - let fuc = self.tys.funcs[func as usize]; + let fuc = &self.tys.funcs[func as usize]; let fast = self.files[fuc.file as usize].clone(); let E::BinOp { right: &E::Closure { args: cargs, body, .. }, .. 
} = fuc.expr.get(&fast).unwrap() @@ -1456,6 +871,7 @@ impl Codegen { let loc = self.alloc_ret(sig.ret, ctx, true); let prev_ret_reg = std::mem::replace(&mut self.ci.inline_ret_loc, loc); + let fuc = &self.tys.funcs[func as usize]; let prev_file = std::mem::replace(&mut self.ci.file, fuc.file); let prev_ret = std::mem::replace(&mut self.ci.ret, Some(sig.ret)); self.expr(body); @@ -1468,14 +884,14 @@ impl Codegen { } if let Some(last_ret) = self.ci.ret_relocs.last() - && last_ret.offset as usize + self.ci.snap.code == self.output.code.len() - 5 + && last_ret.offset as usize == self.ci.code.len() - 5 { - self.output.code.truncate(self.output.code.len() - 5); + self.ci.code.truncate(self.ci.code.len() - 5); self.ci.ret_relocs.pop(); } - let len = self.output.code.len() as u32; + let len = self.ci.code.len() as u32; for rel in self.ci.ret_relocs.drain(ret_reloc_base..) { - rel.apply_jump(&mut self.output.code, len, self.ci.snap.code as u32); + rel.apply_jump(&mut self.ci.code, len, 0); } return Some(Value { ty: sig.ret, loc }); @@ -1499,7 +915,7 @@ impl Codegen { let loc = self.alloc_ret(ty, ctx, false); - self.output.emit(eca()); + self.ci.emit(eca()); self.load_ret(ty, &loc); @@ -1528,7 +944,7 @@ impl Codegen { if from_size < to_size && val.ty.is_signed() { let reg = self.loc_to_reg(val.loc, from_size); let op = [sxt8, sxt16, sxt32][from_size.ilog2() as usize]; - self.output.emit(op(reg.get(), reg.get())); + self.ci.emit(op(reg.get(), reg.get())); val.loc = Loc::reg(reg); } @@ -1611,38 +1027,38 @@ impl Codegen { self.report(pos, "string literal must end with null byte (for now)"); } - let report = |s: &Codegen, bytes: &std::str::Bytes, message| { - s.report(pos + (literal.len() - bytes.len()) as u32 - 1, message) + let report = |bytes: &std::str::Bytes, message| { + self.report(pos + (literal.len() - bytes.len()) as u32 - 1, message) }; - let start = self.output.string_data.len(); + let mut str = Vec::::with_capacity(literal.len()); - let decode_braces = |s: &mut Codegen, bytes: &mut std::str::Bytes| { + let decode_braces = |str: &mut Vec, bytes: &mut std::str::Bytes| { while let Some(b) = bytes.next() && b != b'}' { let c = bytes .next() - .unwrap_or_else(|| report(s, bytes, "incomplete escape sequence")); - let decode = |s: &Codegen, b: u8| match b { + .unwrap_or_else(|| report(bytes, "incomplete escape sequence")); + let decode = |b: u8| match b { b'0'..=b'9' => b - b'0', b'a'..=b'f' => b - b'a' + 10, b'A'..=b'F' => b - b'A' + 10, - _ => report(s, bytes, "expected hex digit or '}'"), + _ => report(bytes, "expected hex digit or '}'"), }; - s.output.string_data.push(decode(s, b) << 4 | decode(s, c)); + str.push(decode(b) << 4 | decode(c)); } }; let mut bytes = literal.bytes(); while let Some(b) = bytes.next() { if b != b'\\' { - self.output.string_data.push(b); + str.push(b); continue; } let b = match bytes .next() - .unwrap_or_else(|| report(self, &bytes, "incomplete escape sequence")) + .unwrap_or_else(|| report(&bytes, "incomplete escape sequence")) { b'n' => b'\n', b'r' => b'\r', @@ -1652,24 +1068,22 @@ impl Codegen { b'"' => b'"', b'0' => b'\0', b'{' => { - decode_braces(self, &mut bytes); + decode_braces(&mut str, &mut bytes); continue; } - _ => report(self, &bytes, "unknown escape sequence, expected [nrt\\\"'{0]"), + _ => report(&bytes, "unknown escape sequence, expected [nrt\\\"'{0]"), }; - self.output.string_data.push(b); + str.push(b); } - let range = start as _..self.output.string_data.len() as _; - let reloc = Reloc::new(self.local_offset() as _, 3, 4); - 
self.output.strings.push(StringReloc { - from: self.ci.id, - reloc, - range, - shifted: false, - }); + let reloc = Reloc::new(self.ci.code.len() as _, 3, 4); + let glob = self.tys.globals.len() as ty::Global; + self.tys.globals.push(Global { data: str, ..Default::default() }); + self.ci + .relocs + .push(TypedReloc { target: ty::Kind::Global(glob).compress(), reloc }); let reg = self.ci.regs.allocate(); - self.output.emit(instrs::lra(reg.get(), 0, 0)); + self.ci.emit(instrs::lra(reg.get(), 0, 0)); Some(Value::new(self.tys.make_ptr(ty::U8.into()), reg)) } E::Ctor { pos, ty, fields, .. } => { @@ -1731,7 +1145,7 @@ impl Codegen { return Some(Value { ty, loc }); } E::Field { target, name: field } => { - let checkpoint = self.local_snap(); + let checkpoint = self.ci.snap(); let mut tal = self.expr(target)?; if let ty::Kind::Ptr(ty) = tal.ty.expand() { @@ -1748,7 +1162,7 @@ impl Codegen { } ty::Kind::Builtin(ty::TYPE) => { self.ci.free_loc(tal.loc); - self.pop_local_snap(checkpoint); + self.ci.revert(checkpoint); match ty::Kind::from_ty(self.ty(target)) { ty::Kind::Module(idx) => { match self.find_or_declare(target.pos(), idx, Err(field), "") { @@ -1840,7 +1254,7 @@ impl Codegen { //self.output.trunc(&snap); self.ci.vars.truncate(scope); - let fuc = self.tys.funcs[func as usize]; + let fuc = &self.tys.funcs[func as usize]; let ast = self.files[fuc.file as usize].clone(); let E::BinOp { right: &E::Closure { args: cargs, .. }, .. } = fuc.expr.get(&ast).unwrap() @@ -1878,20 +1292,16 @@ impl Codegen { let loc = self.alloc_ret(sig.ret, ctx, true); if should_momize { - self.output.write_trap(trap::Trap::MomizedCall(trap::MomizedCall { func })); + self.ci.write_trap(trap::Trap::MomizedCall(trap::MomizedCall { func })); } - let rel = Reloc::new(self.local_offset(), 3, 4); - self.output.relocs.push(OutReloc { - from: self.ci.id, - to: ty::Kind::Func(func), - rel, - }); - self.output.emit(jal(RET_ADDR, ZERO, 0)); + let reloc = Reloc::new(self.ci.code.len(), 3, 4); + self.ci.relocs.push(TypedReloc { target: ty::Kind::Func(func).compress(), reloc }); + self.ci.emit(jal(RET_ADDR, ZERO, 0)); self.make_func_reachable(func); if should_momize { - self.output.emit(tx()); + self.ci.emit(tx()); } self.load_ret(sig.ret, &loc); @@ -1944,8 +1354,8 @@ impl Codegen { Some(ret) => _ = self.assert_ty(pos, ty, ret, "return type"), } - self.ci.ret_relocs.push(Reloc::new(self.local_offset(), 1, 4)); - self.output.emit(jmp(0)); + self.ci.ret_relocs.push(Reloc::new(self.ci.code.len(), 1, 4)); + self.ci.emit(jmp(0)); None } E::Block { stmts, .. 
} => { @@ -2010,7 +1420,7 @@ impl Codegen { let left_reg = self.loc_to_reg(&left.loc, lsize); let right_reg = self.loc_to_reg(&right.loc, rsize); jump_offset = self.local_offset(); - self.output.emit(op(left_reg.get(), right_reg.get(), 0)); + self.ci.emit(op(left_reg.get(), right_reg.get(), 0)); self.ci.free_loc(left.loc); self.ci.free_loc(right.loc); self.ci.regs.free(left_reg); @@ -2022,7 +1432,7 @@ impl Codegen { let cond = self.expr_ctx(cond, Ctx::default().with_ty(ty::BOOL))?; let reg = self.loc_to_reg(&cond.loc, 1); jump_offset = self.local_offset(); - self.output.emit(jeq(reg.get(), 0, 0)); + self.ci.emit(jeq(reg.get(), 0, 0)); self.ci.free_loc(cond.loc); self.ci.regs.free(reg); } @@ -2036,7 +1446,7 @@ impl Codegen { if let Some(else_) = else_ { let else_jump_offset = self.local_offset(); if !then_unreachable { - self.output.emit(jmp(0)); + self.ci.emit(jmp(0)); jump = self.local_offset() as i64 - jump_offset as i64; } @@ -2063,16 +1473,15 @@ impl Codegen { if !body_unreachable { let loop_end = self.local_offset(); - self.output.emit(jmp(loop_start as i32 - loop_end as i32)); + self.ci.emit(jmp(loop_start as i32 - loop_end as i32)); } - let loop_end = self.output.code.len() as u32; + let loop_end = self.ci.code.len() as u32; let loopa = self.ci.loops.pop().unwrap(); let is_unreachable = loopa.reloc_base == self.ci.loop_relocs.len() as u32; for reloc in self.ci.loop_relocs.drain(loopa.reloc_base as usize..) { - let off = - reloc.apply_jump(&mut self.output.code, loop_end, self.ci.snap.code as _); + let off = reloc.apply_jump(&mut self.ci.code, loop_end, 0); debug_assert!(off > 0); } @@ -2089,30 +1498,30 @@ impl Codegen { Some(Value::void()) } E::Break { .. } => { - self.ci.loop_relocs.push(Reloc::new(self.local_offset(), 1, 4)); - self.output.emit(jmp(0)); + self.ci.loop_relocs.push(Reloc::new(self.ci.code.len(), 1, 4)); + self.ci.emit(jmp(0)); None } E::Continue { .. 
} => { let loop_ = self.ci.loops.last().unwrap(); let offset = self.local_offset(); - self.output.emit(jmp(loop_.offset as i32 - offset as i32)); + self.ci.emit(jmp(loop_.offset as i32 - offset as i32)); None } E::BinOp { left, op: op @ (T::And | T::Or), right } => { let lhs = self.expr_ctx(left, Ctx::default().with_ty(ty::BOOL))?; let lhs = self.loc_to_reg(lhs.loc, 1); - let jump_offset = self.output.code.len() + 3; + let jump_offset = self.ci.code.len() + 3; let op = if op == T::And { jeq } else { jne }; - self.output.emit(op(lhs.get(), 0, 0)); + self.ci.emit(op(lhs.get(), 0, 0)); if let Some(rhs) = self.expr_ctx(right, Ctx::default().with_ty(ty::BOOL)) { let rhs = self.loc_to_reg(rhs.loc, 1); - self.output.emit(cp(lhs.get(), rhs.get())); + self.ci.emit(cp(lhs.get(), rhs.get())); } - let jump = self.output.code.len() as i64 - jump_offset as i64; - write_reloc(&mut self.output.code, jump_offset, jump, 2); + let jump = self.ci.code.len() as i64 - jump_offset as i64; + write_reloc(&mut self.ci.code, jump_offset, jump, 2); Some(Value { ty: ty::BOOL.into(), loc: Loc::reg(lhs) }) } @@ -2177,7 +1586,7 @@ impl Codegen { imm *= size as u64; } - self.output.emit(oper(dst.get(), lhs.get(), imm)); + self.ci.emit(oper(dst.get(), lhs.get(), imm)); self.ci.regs.free(lhs); self.ci.free_loc(drop_loc); break 'ops Some(Value::new(ty, dst)); @@ -2195,7 +1604,7 @@ impl Codegen { rhs.get() }; let op = [sxt8, sxt16, sxt32][min_size.ilog2() as usize]; - self.output.emit(op(operand, operand)); + self.ci.emit(op(operand, operand)); } if left.ty.is_pointer() ^ right.ty.is_pointer() { @@ -2210,12 +1619,12 @@ impl Codegen { let ty::Kind::Ptr(ty) = ty.expand() else { unreachable!() }; let size = self.tys.size_of(self.tys.ptrs[ty as usize].base); - self.output.emit(muli64(offset, offset, size as _)); + self.ci.emit(muli64(offset, offset, size as _)); } } if let Some(op) = Self::math_op(op, signed, size) { - self.output.emit(op(dst.get(), lhs.get(), rhs.get())); + self.ci.emit(op(dst.get(), lhs.get(), rhs.get())); self.ci.regs.free(lhs); self.ci.regs.free(rhs); self.ci.free_loc(right.loc); @@ -2232,10 +1641,10 @@ impl Codegen { }; let op_fn = if signed { cmps } else { cmpu }; - self.output.emit(op_fn(dst.get(), lhs.get(), rhs.get())); - self.output.emit(cmpui(dst.get(), dst.get(), against)); + self.ci.emit(op_fn(dst.get(), lhs.get(), rhs.get())); + self.ci.emit(cmpui(dst.get(), dst.get(), against)); if matches!(op, T::Eq | T::Lt | T::Gt) { - self.output.emit(not(dst.get(), dst.get())); + self.ci.emit(not(dst.get(), dst.get())); } self.ci.regs.free(lhs); @@ -2265,7 +1674,7 @@ impl Codegen { } fn compute_signature(&mut self, func: &mut ty::Func, pos: Pos, args: &[Expr]) -> Option { - let fuc = self.tys.funcs[*func as usize]; + let fuc = &self.tys.funcs[*func as usize]; let fast = self.files[fuc.file as usize].clone(); let Expr::BinOp { right: &Expr::Closure { args: cargs, ret, .. }, .. 
} = fuc.expr.get(&fast).unwrap() @@ -2309,13 +1718,12 @@ impl Codegen { let sym = SymKey { file: !args.repr(), ident: ty::Kind::Func(*func).compress().repr() }; let ct = || { let func_id = self.tys.funcs.len(); + let fuc = &self.tys.funcs[*func as usize]; self.tys.funcs.push(Func { file: fuc.file, - offset: u32::MAX, - size: 0, - runtime: false, sig: Some(Sig { args, ret }), expr: fuc.expr, + ..Default::default() }); ty::Kind::Func(func_id as _).compress() @@ -2331,14 +1739,12 @@ impl Codegen { } fn infer_type(&mut self, expr: &Expr) -> ty::Id { - let mut snap = self.output.snap(); - snap._sub(&self.ci.snap); + // FIXME: very inneficient let mut ci = ItemCtx { file: self.ci.file, id: self.ci.id, ret: self.ci.ret, task_base: self.ci.task_base, - snap: self.ci.snap, loops: self.ci.loops.clone(), vars: self .ci @@ -2360,9 +1766,6 @@ impl Codegen { let value = self.expr(expr).unwrap(); self.ci.free_loc(value.loc); std::mem::swap(&mut self.ci, &mut ci); - self.ci.snap = ci.snap; - snap._add(&self.ci.snap); - self.output.trunc(&snap); value.ty } @@ -2380,11 +1783,11 @@ impl Codegen { ci.vars.append(&mut self.ci.vars); let loc = self.ct_eval(ci, |s, prev| { - s.output.emit_prelude(); + s.ci.emit_prelude(); if s.ci.ret.map_or(true, |r| s.tys.size_of(r) > 16) { let reg = s.ci.regs.allocate(); - s.output.emit(instrs::cp(reg.get(), 1)); + s.ci.emit(instrs::cp(reg.get(), 1)); s.ci.ret_reg = reg; }; @@ -2395,13 +1798,11 @@ impl Codegen { ty = s.ci.ret; - let stash = s.complete_call_graph(); - - s.push_stash(stash); + s.complete_call_graph(); prev.vars.append(&mut s.ci.vars); - s.ci.finalize(&mut s.output); - s.output.emit(tx()); + s.ci.finalize(); + s.ci.emit(tx()); Ok(1) }); @@ -2553,7 +1954,7 @@ impl Codegen { if let Loc::Ct { value, derefed: false } = right && let Some(op) = Self::imm_math_op(op, signed, size) { - self.output.emit(op(lhs.get(), lhs.get(), value.0)); + self.ci.emit(op(lhs.get(), lhs.get(), value.0)); return Some(if let Some(value) = ctx.into_value() { self.store_typed(Loc::reg(lhs.as_ref()), value.loc, value.ty); Value::void() @@ -2565,7 +1966,7 @@ impl Codegen { let rhs = self.loc_to_reg(right, size); if let Some(op) = Self::math_op(op, signed, size) { - self.output.emit(op(lhs.get(), lhs.get(), rhs.get())); + self.ci.emit(op(lhs.get(), lhs.get(), rhs.get())); self.ci.regs.free(rhs); return if let Some(value) = ctx.into_value() { self.store_typed(Loc::reg(lhs), value.loc, value.ty); @@ -2651,11 +2052,10 @@ impl Codegen { fn handle_global(&mut self, id: ty::Global) -> Option { let ptr = self.ci.regs.allocate(); - let rel = Reloc::new(self.local_offset(), 3, 4); + let reloc = Reloc::new(self.ci.code.len(), 3, 4); let global = &mut self.tys.globals[id as usize]; - self.output.relocs.push(OutReloc { from: self.ci.id, to: ty::Kind::Global(id), rel }); - self.output.emit(instrs::lra(ptr.get(), 0, 0)); - global.runtime |= !self.ct.active(); + self.ci.relocs.push(TypedReloc { target: ty::Kind::Global(id).compress(), reloc }); + self.ci.emit(instrs::lra(ptr.get(), 0, 0)); Some(Value { ty: global.ty, loc: Loc::reg(ptr).into_derefed() }) } @@ -2684,44 +2084,17 @@ impl Codegen { } } - #[must_use] - fn complete_call_graph(&mut self) -> Output { - let stash = self.pop_stash(); - self.complete_call_graph_low(); - - self.ci.snap = self.output.snap(); - stash - } - - fn complete_call_graph_low(&mut self) { + fn complete_call_graph(&mut self) { while self.ci.task_base < self.tasks.len() && let Some(task_slot) = self.tasks.pop() { let Some(task) = task_slot else { continue }; self.handle_task(task); } 
- - //println!("{}", std::backtrace::Backtrace::capture()); - let base = self.output.code.len() as u32; - let prev_data_len = self.output.string_data.len(); - self.output.code.append(&mut self.output.string_data); - // we drain these when linking - for srel in self.output.strings.iter_mut().filter(|s| !s.shifted) { - debug_assert!( - srel.range.end <= prev_data_len as u32, - "{} <= {}", - srel.range.end, - prev_data_len as u32 - ); - debug_assert!(srel.range.start <= srel.range.end); - srel.range.start += base; - srel.range.end += base; - srel.shifted = true; - } } fn handle_task(&mut self, FTask { file, id }: FTask) { - let func = self.tys.funcs[id as usize]; + let func = &self.tys.funcs[id as usize]; debug_assert!(func.file == file); let sig = func.sig.unwrap(); let ast = self.files[file as usize].clone(); @@ -2736,7 +2109,6 @@ impl Codegen { }; let prev_ci = std::mem::replace(&mut self.ci, repl); self.ci.regs.init(); - self.ci.snap = self.output.snap(); let Expr::BinOp { left: Expr::Ident { .. }, @@ -2747,7 +2119,7 @@ impl Codegen { unreachable!("{expr}") }; - self.output.emit_prelude(); + self.ci.emit_prelude(); let mut parama = self.tys.parama(sig.ret); let mut sig_args = sig.args.range(); @@ -2767,7 +2139,7 @@ impl Codegen { if self.tys.size_of(sig.ret) > 16 { let reg = self.ci.regs.allocate(); - self.output.emit(instrs::cp(reg.get(), 1)); + self.ci.emit(instrs::cp(reg.get(), 1)); self.ci.ret_reg = reg; } else { self.ci.ret_reg = reg::Id::RET; @@ -2781,11 +2153,11 @@ impl Codegen { self.ci.free_loc(vars.value.loc); } - self.ci.finalize(&mut self.output); - self.output.emit(jala(ZERO, RET_ADDR, 0)); + self.ci.finalize(); + self.ci.emit(jala(ZERO, RET_ADDR, 0)); self.ci.regs.free(std::mem::take(&mut self.ci.ret_reg)); - self.tys.funcs[id as usize].offset = self.ci.snap.code as Offset; - self.tys.funcs[id as usize].size = self.local_offset(); + self.tys.funcs[id as usize].code.append(&mut self.ci.code); + self.tys.funcs[id as usize].relocs.append(&mut self.ci.relocs); self.pool.cis.push(std::mem::replace(&mut self.ci, prev_ci)); self.ct.vm.write_reg(reg::STACK_PTR, ct_stack_base); } @@ -2805,7 +2177,7 @@ impl Codegen { _ if flags & (idfl::MUTABLE | idfl::REFERENCED) == 0 => { let ptr = parama.next(); let reg = self.ci.regs.allocate(); - self.output.emit(instrs::cp(reg.get(), ptr)); + self.ci.emit(instrs::cp(reg.get(), ptr)); return Loc::reg(reg).into_derefed(); } _ => (Loc::reg(parama.next()).into_derefed(), Loc::stack(self.ci.stack.allocate(size))), @@ -2816,7 +2188,7 @@ impl Codegen { } fn eca(&mut self, trap: trap::Trap, ret: impl Into) -> Value { - self.output.write_trap(trap); + self.ci.write_trap(trap); Value { ty: ret.into(), loc: Loc::reg(1) } } @@ -2855,7 +2227,7 @@ impl Codegen { if reg.is_ref() { let new_reg = self.ci.regs.allocate(); debug_assert_ne!(reg.get(), 0); - self.output.emit(cp(new_reg.get(), reg.get())); + self.ci.emit(cp(new_reg.get(), reg.get())); reg = new_reg; } reg @@ -2925,25 +2297,25 @@ impl Codegen { match (src.as_ref(), dst.as_ref()) { (&Loc::Ct { value, derefed }, lpat!(true, reg, off, ref sta)) => { let ct = self.ci.regs.allocate(); - self.output.emit(li64(ct.get(), ensure_loaded(value, derefed, size))); + self.ci.emit(li64(ct.get(), ensure_loaded(value, derefed, size))); let off = self.opt_stack_reloc(sta.as_ref(), off, 3); - self.output.emit(st(ct.get(), reg.get(), off, size as _)); + self.ci.emit(st(ct.get(), reg.get(), off, size as _)); self.ci.regs.free(ct); } (&Loc::Ct { value, derefed }, lpat!(false, reg, 0, None)) => { - 
self.output.emit(li64(reg.get(), ensure_loaded(value, derefed, size))) + self.ci.emit(li64(reg.get(), ensure_loaded(value, derefed, size))) } (&Loc::Ct { value, derefed }, lpat!(false, reg, 8, None)) if reg.get() == 1 && size == 8 => { - self.output.emit(li64(reg.get() + 1, ensure_loaded(value, derefed, size))); + self.ci.emit(li64(reg.get() + 1, ensure_loaded(value, derefed, size))); } (&Loc::Ct { value, derefed }, lpat!(false, reg, off, None)) if reg.get() == 1 => { let freg = reg.get() + (off / 8) as u8; let mask = !(((1u64 << (8 * size)) - 1) << (8 * (off % 8))); - self.output.emit(andi(freg, freg, mask)); + self.ci.emit(andi(freg, freg, mask)); let value = ensure_loaded(value, derefed, size) << (8 * (off % 8)); - self.output.emit(ori(freg, freg, value)); + self.ci.emit(ori(freg, freg, value)); } (lpat!(true, src, soff, ref ssta), lpat!(true, dst, doff, ref dsta)) => { // TODO: some oportuinies to ellit more optimal code @@ -2951,26 +2323,26 @@ impl Codegen { let dst_off = if dst.is_ref() { self.ci.regs.allocate() } else { dst.as_ref() }; self.stack_offset(src_off.get(), src.get(), ssta.as_ref(), soff); self.stack_offset(dst_off.get(), dst.get(), dsta.as_ref(), doff); - self.output.emit(bmc(src_off.get(), dst_off.get(), size as _)); + self.ci.emit(bmc(src_off.get(), dst_off.get(), size as _)); self.ci.regs.free(src_off); self.ci.regs.free(dst_off); } (lpat!(false, src, 0, None), lpat!(false, dst, 0, None)) => { if src != dst { debug_assert_ne!(src.get(), 0); - self.output.emit(cp(dst.get(), src.get())); + self.ci.emit(cp(dst.get(), src.get())); } } (lpat!(true, src, soff, ref ssta), lpat!(false, dst, 0, None)) => { if size < 8 { - self.output.emit(cp(dst.get(), 0)); + self.ci.emit(cp(dst.get(), 0)); } let off = self.opt_stack_reloc(ssta.as_ref(), soff, 3); - self.output.emit(ld(dst.get(), src.get(), off, size as _)); + self.ci.emit(ld(dst.get(), src.get(), off, size as _)); } (lpat!(false, src, 0, None), lpat!(true, dst, doff, ref dsta)) => { let off = self.opt_stack_reloc(dsta.as_ref(), doff, 3); - self.output.emit(st(src.get(), dst.get(), off, size as _)) + self.ci.emit(st(src.get(), dst.get(), off, size as _)) } (a, b) => unreachable!("{a:?} {b:?}"), } @@ -2981,12 +2353,12 @@ impl Codegen { fn stack_offset(&mut self, dst: u8, op: u8, stack: Option<&stack::Id>, off: Offset) { let Some(stack) = stack else { - self.output.emit_addi(dst, op, off as _); + self.ci.emit_addi(dst, op, off as _); return; }; let off = self.stack_reloc(stack, off, 3); - self.output.emit(addi64(dst, op, off)); + self.ci.emit(addi64(dst, op, off)); } fn opt_stack_reloc(&mut self, stack: Option<&stack::Id>, off: Offset, sub_offset: u8) -> u64 { @@ -2994,63 +2366,11 @@ impl Codegen { } fn stack_reloc(&mut self, stack: &stack::Id, off: Offset, sub_offset: u8) -> u64 { - let offset = self.local_offset(); + let offset = self.ci.code.len(); self.ci.stack_relocs.push(Reloc::new(offset, sub_offset, 8)); Reloc::pack_srel(stack, off) } - fn link(&mut self) { - let ct_hint = self.ct.active().then_some(self.ci.snap.code as u32); - - for reloc in &self.output.relocs { - if !self.tys.is_runtime_item(reloc.from) && !self.ct.active() { - continue; - } - - let Some(to_offset) = self.tys.offset_of_item(reloc.to, ct_hint) else { - continue; - }; - - let from_offset = self.tys.offset_of_item(reloc.from, ct_hint).unwrap(); - reloc.rel.apply_jump(&mut self.output.code, to_offset, from_offset); - } - - //self.compress_strings(); - for reloc in self.output.strings.iter().filter(|s| s.shifted) { - let Some(from_offset) = 
self.tys.offset_of_item(reloc.from, ct_hint) else { continue }; - reloc.reloc.apply_jump(&mut self.output.code, reloc.range.start, from_offset); - } - } - - //fn compress_strings(&mut self) { - // // FIXME: we can go faster - // self.output - // .strings - // .sort_by(|a, b| self.string_data[b.range()].cmp(&self.string_data[a.range()])); - - // let mut cursor = 0; - // let mut anchor = 0; - // for i in 1..self.output.strings.len() { - // let [a, b] = self.output.strings.get_many_mut([anchor, i]).unwrap(); - // if self.string_data[a.range()].ends_with(&self.string_data[b.range()]) { - // b.range.end = a.range.end; - // b.range.start = a.range.end - (b.range.end - b.range.start); - // } else { - // self.string_data.copy_within(a.range(), cursor); - // cursor += a.range.len(); - // anchor = i; - // } - // } - - // if !self.output.strings.is_empty() { - // let a = &self.output.strings[anchor]; - // self.string_data.copy_within(a.range(), cursor); - // cursor += a.range.len(); - // } - - // self.string_data.truncate(cursor) - //} - // TODO: sometimes its better to do this in bulk fn ty(&mut self, expr: &Expr) -> ty::Id { ty::Id::from(self.eval_const(expr, ty::TYPE)) @@ -3069,8 +2389,7 @@ impl Codegen { self.ct.vm.pc = self.ct.vm.pc.wrapping_add(trap.size() + 1); let mut extra_jump = 0; - let mut local_pc = (self.ct.vm.pc.get() as usize - self.output.code.as_ptr() as usize) - .checked_sub(self.ci.snap.code); + let mut code_index = Some(self.ct.vm.pc.get() as usize - self.ci.code.as_ptr() as usize); match *trap { trap::Trap::MakeStruct(trap::MakeStruct { file, struct_expr }) => { @@ -3107,15 +2426,15 @@ impl Codegen { self.ct.vm.write_reg(1, ty.repr()); extra_jump = jal(0, 0, 0).0 + tx().0; } else { - local_pc = None; + code_index = None; self.run_vm(); self.tys.syms.insert(sym, self.ct.vm.read_reg(1).0.into()); } } } - if let Some(lpc) = local_pc { - let offset = lpc + self.ci.snap.code + self.output.code.as_ptr() as usize; + if let Some(lpc) = code_index { + let offset = lpc + self.ci.code.as_ptr() as usize; self.ct.vm.pc = hbvm::mem::Address::new(offset as _); } self.ct.vm.pc += extra_jump; @@ -3141,8 +2460,8 @@ impl Codegen { if let Some(existing) = self.tys.syms.get(&SymKey { file, ident }) { if let ty::Kind::Func(id) = existing.expand() && let func = &mut self.tys.funcs[id as usize] - && func.offset != u32::MAX && let Err(idx) = task::unpack(func.offset) + && idx < self.tasks.len() { func.offset = task::id(self.tasks.len()); let task = self.tasks[idx].take(); @@ -3182,9 +2501,7 @@ impl Codegen { debug_assert!(refr.get(&f).is_some()); refr }, - runtime: false, - offset: u32::MAX, - size: 0, + ..Default::default() }; let id = self.tys.funcs.len() as _; @@ -3199,13 +2516,7 @@ impl Codegen { } => ty::Kind::Struct(self.build_struct(fields)), Expr::BinOp { left, op: TokenKind::Decl, right } => { let gid = self.tys.globals.len() as ty::Global; - self.tys.globals.push(Global { - offset: u32::MAX, - ty: Default::default(), - runtime: false, - _file: file, - _name: ident, - }); + self.tys.globals.push(Global { file, name: ident, ..Default::default() }); let ci = ItemCtx { file, @@ -3230,7 +2541,6 @@ impl Codegen { fn make_func_reachable(&mut self, func: ty::Func) { let fuc = &mut self.tys.funcs[func as usize]; - fuc.runtime |= !self.ct.active(); if fuc.offset == u32::MAX { fuc.offset = task::id(self.tasks.len() as _); self.tasks.push(Some(FTask { file: fuc.file, id: func })); @@ -3238,10 +2548,10 @@ impl Codegen { } fn generate_global(&mut self, expr: &Expr, file: FileId, name: Ident) -> Global { - 
self.output.emit_prelude(); + self.ci.emit_prelude(); let ret = self.ci.regs.allocate(); - self.output.emit(instrs::cp(ret.get(), 1)); + self.ci.emit(instrs::cp(ret.get(), 1)); self.ci.task_base = self.tasks.len(); let ctx = Ctx::default().with_loc(Loc::reg(ret).into_derefed()); @@ -3249,34 +2559,18 @@ impl Codegen { self.report(expr.pos(), "expression is not reachable"); }; - let stash = self.complete_call_graph(); + self.complete_call_graph(); - let offset = self.ci.snap.code; - self.ci.snap.code += self.tys.size_of(ret.ty) as usize; - self.output.code.resize(self.ci.snap.code, 0); + let mut data = vec![0; self.tys.size_of(ret.ty) as usize]; - self.push_stash(stash); + self.ci.finalize(); + self.ci.emit(tx()); - self.ci.finalize(&mut self.output); - self.output.emit(tx()); - - let ret_loc = unsafe { self.output.code.as_mut_ptr().add(offset) }; - self.ct.vm.write_reg(1, ret_loc as u64); + self.ct.vm.write_reg(1, data.as_mut_ptr() as u64); self.ci.free_loc(ret.loc); - Global { ty: ret.ty, offset: offset as _, runtime: false, _file: file, _name: name } - } - - fn pop_stash(&mut self) -> Output { - let mut stash = self.pool.outputs.pop().unwrap_or_default(); - self.output.pop(&mut stash, &self.ci.snap); - stash - } - - fn push_stash(&mut self, mut stash: Output) { - self.output.append(&mut stash); - self.pool.outputs.push(stash); + Global { ty: ret.ty, file, name, data, ..Default::default() } } fn ct_eval( @@ -3285,12 +2579,8 @@ impl Codegen { compile: impl FnOnce(&mut Self, &mut ItemCtx) -> Result, ) -> Result { log::trc!("eval"); - self.ct.enter(); - let stash = self.pop_stash(); let mut prev_ci = std::mem::replace(&mut self.ci, ci); - self.ci.snap = self.output.snap(); - debug_assert_eq!(self.ci.snap, prev_ci.snap); self.ci.task_base = self.tasks.len(); self.ci.regs.init(); @@ -3298,12 +2588,18 @@ impl Codegen { let mut rr = std::mem::take(&mut self.ci.ret_reg); let is_on_stack = !rr.is_ref(); if !rr.is_ref() { - self.output.emit(instrs::cp(1, rr.get())); + self.ci.emit(instrs::cp(1, rr.get())); let rref = rr.as_ref(); self.ci.regs.free(std::mem::replace(&mut rr, rref)); } if ret.is_ok() { + let last_fn = self.tys.funcs.len(); + self.tys.funcs.push(Default::default()); + + self.tys.funcs[last_fn].code.append(&mut self.ci.code); + self.tys.funcs[last_fn].relocs.append(&mut self.ci.relocs); + if is_on_stack { let size = self.tys.size_of(self.ci.ret.expect("you have died (colaterall fuck up)")); @@ -3312,16 +2608,19 @@ impl Codegen { self.ct.vm.write_reg(1, slot); } - self.link(); - self.output.trunc(&Snapshot { code: self.output.code.len(), ..self.ci.snap }); - let entry = &mut self.output.code[self.ci.snap.code] as *mut _ as _; + self.tys.dump_reachable(last_fn as _, &mut self.ct.code); + let entry = &mut self.ct.code[self.tys.funcs[last_fn].offset as usize] as *mut _ as _; let prev_pc = std::mem::replace(&mut self.ct.vm.pc, hbvm::mem::Address::new(entry)); #[cfg(debug_assertions)] { let mut vc = Vec::::new(); - if self.disasm(&mut vc).is_err() { - panic!("{}", String::from_utf8(vc).unwrap()); + if let Err(e) = self.tys.disasm(&self.ct.code, &self.files, &mut vc, |bts| { + if let Some(trap) = Self::read_trap(bts.as_ptr() as _) { + bts.take(..trap.size() + 1).unwrap(); + } + }) { + panic!("{e} {}", String::from_utf8(vc).unwrap()); } else { //log::inf!("{}", String::from_utf8(vc).unwrap()); } @@ -3329,98 +2628,21 @@ impl Codegen { self.run_vm(); self.ct.vm.pc = prev_pc; + + self.tys.funcs.pop().unwrap(); } - self.output.trunc(&self.ci.snap); self.pool.cis.push(std::mem::replace(&mut self.ci, 
prev_ci)); - self.ci.snap = self.output.snap(); - self.push_stash(stash); - self.ct.exit(); log::trc!("eval-end"); ret } pub fn disasm(&mut self, output: &mut impl std::io::Write) -> std::io::Result<()> { - use crate::DisasmItem; - let mut sluce = self.output.code.as_slice(); - let functions = self - .ct - .active() - .then_some(( - self.ci.snap.code as u32, - ( - "target_fn", - (self.output.code.len() - self.ci.snap.code) as u32, - DisasmItem::Func, - ), - )) - .into_iter() - .chain( - self.tys - .funcs - .iter() - .enumerate() - .filter(|&(i, f)| { - task::unpack(f.offset).is_ok() - && (f.runtime || self.ct.active()) - && (!self.ct.active() || i != 0) - && self.is_fully_linked(i as ty::Func) - }) - .map(|(_, f)| { - let file = &self.files[f.file as usize]; - let Expr::BinOp { left: &Expr::Ident { name, .. }, .. } = - f.expr.get(file).unwrap() - else { - unreachable!() - }; - (f.offset, (name, f.size, DisasmItem::Func)) - }), - ) - .chain( - self.tys - .globals - .iter() - .filter(|g| task::unpack(g.offset).is_ok() && (g.runtime || self.ct.active())) - .map(|g| { - let file = &self.files[g._file as usize]; - - ( - g.offset, - (file.ident_str(g._name), self.tys.size_of(g.ty), DisasmItem::Global), - ) - }), - ) - .chain(self.output.strings.iter().map(|s| { - ( - s.range.start, - ( - std::str::from_utf8( - &self.output.code[s.range.start as usize..s.range.end as usize - 1], - ) - .unwrap_or("!!!!invalid string"), - s.range.len() as _, - DisasmItem::Global, - ), - ) - })) - .collect::>(); - crate::disasm(&mut sluce, &functions, output, |bin| { - if self.ct.active() - && let Some(trap) = Self::read_trap(bin.as_ptr() as u64) - { - bin.take(..trap.size() + 1).unwrap(); - } - }) - } - - fn is_fully_linked(&self, func: ty::Func) -> bool { - self.output - .relocs - .iter() - .filter(|r| r.from == ty::Kind::Func(func)) - .all(|r| self.tys.offset_of_item(r.to, None).is_some()) + let mut bin = Vec::new(); + self.tys.assemble(&mut bin); + self.tys.disasm(&bin, &self.files, output, |_| {}) } fn run_vm(&mut self) { @@ -3487,23 +2709,11 @@ impl Codegen { } fn local_code(&mut self) -> &mut [u8] { - &mut self.output.code[self.ci.snap.code..] 
+ &mut self.ci.code } fn local_offset(&self) -> u32 { - (self.output.code.len() - self.ci.snap.code) as u32 - } - - fn local_snap(&self) -> Snapshot { - Snapshot { - code: self.output.code.len() - self.ci.snap.code, - relocs: self.output.relocs.len() - self.ci.snap.relocs, - } - } - - fn pop_local_snap(&mut self, snap: Snapshot) { - self.output.code.truncate(snap.code + self.ci.snap.code); - self.output.relocs.truncate(snap.relocs + self.ci.snap.relocs); + self.ci.code.len() as _ } fn pack_args(&mut self, pos: Pos, arg_base: usize) -> ty::Tuple { @@ -3523,12 +2733,18 @@ impl Codegen { fn cow_reg(&mut self, rhs: reg::Id) -> reg::Id { if rhs.is_ref() { let reg = self.ci.regs.allocate(); - self.output.emit(cp(reg.get(), rhs.get())); + self.ci.emit(cp(reg.get(), rhs.get())); reg } else { rhs } } + + pub fn assemble(&mut self, buf: &mut Vec) { + self.tys.funcs.iter_mut().for_each(|f| f.offset = u32::MAX); + self.tys.globals.iter_mut().for_each(|g| g.offset = u32::MAX); + self.tys.assemble(buf) + } } #[cfg(test)] @@ -3541,10 +2757,10 @@ mod tests { codegen.generate(); let mut out = Vec::new(); - codegen.dump(&mut out).unwrap(); + codegen.assemble(&mut out); let mut buf = Vec::::new(); - let err = codegen.disasm(&mut buf); + let err = codegen.tys.disasm(&out, &codegen.files, &mut buf, |_| {}); output.push_str(String::from_utf8(buf).unwrap().as_str()); if err.is_err() { return; diff --git a/hblang/src/lib.rs b/hblang/src/lib.rs index e038b2c..9626a3a 100644 --- a/hblang/src/lib.rs +++ b/hblang/src/lib.rs @@ -17,12 +17,13 @@ extract_if, ptr_internals )] -#![allow(internal_features, clippy::format_collect, dead_code)] +#![allow(internal_features, clippy::format_collect)] use { self::{ ident::Ident, - parser::{ExprRef, FileId}, + parser::{Expr, ExprRef, FileId}, + son::reg, ty::ArrayLen, }, parser::Ast, @@ -53,6 +54,108 @@ pub mod son; mod instrs; mod lexer; +mod task { + use super::Offset; + + pub fn unpack(offset: Offset) -> Result { + if offset >> 31 != 0 { + Err((offset & !(1 << 31)) as usize) + } else { + Ok(offset) + } + } + + pub fn is_done(offset: Offset) -> bool { + unpack(offset).is_ok() + } + + pub fn id(index: usize) -> Offset { + 1 << 31 | index as u32 + } +} + +mod ident { + pub type Ident = u32; + + const LEN_BITS: u32 = 6; + + pub fn len(ident: u32) -> u32 { + ident & ((1 << LEN_BITS) - 1) + } + + pub fn is_null(ident: u32) -> bool { + (ident >> LEN_BITS) == 0 + } + + pub fn pos(ident: u32) -> u32 { + (ident >> LEN_BITS).saturating_sub(1) + } + + pub fn new(pos: u32, len: u32) -> u32 { + debug_assert!(len < (1 << LEN_BITS)); + ((pos + 1) << LEN_BITS) | len + } + + pub fn range(ident: u32) -> std::ops::Range { + let (len, pos) = (len(ident) as usize, pos(ident) as usize); + pos..pos + len + } +} + +mod log { + #![allow(unused_macros)] + + #[derive(PartialOrd, PartialEq, Ord, Eq, Debug)] + pub enum Level { + Err, + Wrn, + Inf, + Dbg, + Trc, + } + + pub const LOG_LEVEL: Level = match option_env!("LOG_LEVEL") { + Some(val) => match val.as_bytes()[0] { + b'e' => Level::Err, + b'w' => Level::Wrn, + b'i' => Level::Inf, + b'd' => Level::Dbg, + b't' => Level::Trc, + _ => panic!("Invalid log level."), + }, + None => { + if cfg!(debug_assertions) { + Level::Dbg + } else { + Level::Err + } + } + }; + + macro_rules! 
+    macro_rules! log {
+        ($level:expr, $fmt:literal $($expr:tt)*) => {
+            if $level <= $crate::log::LOG_LEVEL {
+                eprintln!("{:?}: {}", $level, format_args!($fmt $($expr)*));
+            }
+        };
+
+        ($level:expr, $($arg:expr),+) => {
+            if $level <= $crate::log::LOG_LEVEL {
+                $(eprintln!("[{}:{}:{}][{:?}]: {} = {:?}", line!(), column!(), file!(), $level, stringify!($arg), $arg);)*
+            }
+        };
+    }
+
+    macro_rules! err { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Err, $($arg)*) }; }
+    macro_rules! wrn { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Wrn, $($arg)*) }; }
+    macro_rules! inf { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Inf, $($arg)*) }; }
+    macro_rules! dbg { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Dbg, $($arg)*) }; }
+    macro_rules! trc { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Trc, $($arg)*) }; }
+
+    #[allow(unused_imports)]
+    pub(crate) use {dbg, err, inf, log, trc, wrn};
+}
+
 mod ty {
     use {
         crate::{
@@ -70,7 +173,6 @@ mod ty {
     pub type Func = u32;
     pub type Global = u32;
     pub type Module = u32;
-    pub type Param = u32;
     pub type Slice = u32;
 
     #[derive(Clone, Copy)]
@@ -102,10 +204,6 @@ mod ty {
            self.0 as usize & Self::LEN_MASK
        }
 
-        pub fn is_empty(self) -> bool {
-            self.0 == 0
-        }
-
        pub fn empty() -> Self {
            Self(0)
        }
@@ -435,8 +533,8 @@ impl Default for Global {
             ty: Default::default(),
             offset: u32::MAX,
             data: Default::default(),
-            file: 0,
-            name: 0,
+            file: u32::MAX,
+            name: u32::MAX,
         }
     }
 }
@@ -511,7 +609,99 @@ struct Types {
     arrays: Vec,
 }
 
+fn emit(out: &mut Vec<u8>, (len, instr): (usize, [u8; instrs::MAX_SIZE])) {
+    out.extend_from_slice(&instr[..len]);
+}
+
 impl Types {
+    fn assemble(&mut self, to: &mut Vec<u8>) {
+        emit(to, instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
+        emit(to, instrs::tx());
+        self.dump_reachable(0, to);
+        Reloc::new(0, 3, 4).apply_jump(to, self.funcs[0].offset, 0);
+    }
+
+    fn dump_reachable(&mut self, from: ty::Func, to: &mut Vec<u8>) {
+        let mut frontier = vec![ty::Kind::Func(from).compress()];
+
+        while let Some(itm) = frontier.pop() {
+            match itm.expand() {
+                ty::Kind::Func(func) => {
+                    let fuc = &mut self.funcs[func as usize];
+                    if task::is_done(fuc.offset) {
+                        continue;
+                    }
+                    fuc.offset = to.len() as _;
+                    to.extend(&fuc.code);
+                    frontier.extend(fuc.relocs.iter().map(|r| r.target));
+                }
+                ty::Kind::Global(glob) => {
+                    let glb = &mut self.globals[glob as usize];
+                    if task::is_done(glb.offset) {
+                        continue;
+                    }
+                    glb.offset = to.len() as _;
+                    to.extend(&glb.data);
+                }
+                _ => unreachable!(),
+            }
+        }
+
+        for fuc in &self.funcs {
+            if !task::is_done(fuc.offset) {
+                continue;
+            }
+
+            for rel in &fuc.relocs {
+                let offset = match rel.target.expand() {
+                    ty::Kind::Func(fun) => self.funcs[fun as usize].offset,
+                    ty::Kind::Global(glo) => self.globals[glo as usize].offset,
+                    _ => unreachable!(),
+                };
+                rel.reloc.apply_jump(to, offset, fuc.offset);
+            }
+        }
+    }
+
+    pub fn disasm(
+        &self,
+        mut sluce: &[u8],
+        files: &[parser::Ast],
+        output: &mut impl std::io::Write,
+        eca_handler: impl FnMut(&mut &[u8]),
+    ) -> std::io::Result<()> {
+        use crate::DisasmItem;
+        let functions = self
+            .funcs
+            .iter()
+            .filter(|f| task::is_done(f.offset))
+            .map(|f| {
+                let name = if f.file != u32::MAX {
+                    let file = &files[f.file as usize];
+                    let Expr::BinOp { left: &Expr::Ident { name, .. }, .. } =
+                        f.expr.get(file).unwrap()
+                    else {
+                        unreachable!()
+                    };
+                    name
+                } else {
+                    "target_fn"
+                };
+                (f.offset, (name, f.code.len() as u32, DisasmItem::Func))
+            })
+            .chain(self.globals.iter().filter(|g| task::is_done(g.offset)).map(|g| {
+                let name = if g.file == u32::MAX {
+                    std::str::from_utf8(&g.data).unwrap()
+                } else {
+                    let file = &files[g.file as usize];
+                    file.ident_str(g.name)
+                };
+                (g.offset, (name, g.data.len() as Size, DisasmItem::Global))
+            }))
+            .collect::<BTreeMap<_, _>>();
+        crate::disasm(&mut sluce, &functions, output, eca_handler)
+    }
+
     fn parama(&self, ret: impl Into<ty::Id>) -> ParamAlloc {
         ParamAlloc(2 + (9..=16).contains(&self.size_of(ret.into())) as u8..12)
     }
@@ -622,88 +812,6 @@ impl Types {
     }
 }
 
-mod ident {
-    pub type Ident = u32;
-
-    const LEN_BITS: u32 = 6;
-
-    pub fn len(ident: u32) -> u32 {
-        ident & ((1 << LEN_BITS) - 1)
-    }
-
-    pub fn is_null(ident: u32) -> bool {
-        (ident >> LEN_BITS) == 0
-    }
-
-    pub fn pos(ident: u32) -> u32 {
-        (ident >> LEN_BITS).saturating_sub(1)
-    }
-
-    pub fn new(pos: u32, len: u32) -> u32 {
-        debug_assert!(len < (1 << LEN_BITS));
-        ((pos + 1) << LEN_BITS) | len
-    }
-
-    pub fn range(ident: u32) -> std::ops::Range<usize> {
-        let (len, pos) = (len(ident) as usize, pos(ident) as usize);
-        pos..pos + len
-    }
-}
-
-mod log {
-    #![allow(unused_macros)]
-
-    #[derive(PartialOrd, PartialEq, Ord, Eq, Debug)]
-    pub enum Level {
-        Err,
-        Wrn,
-        Inf,
-        Dbg,
-        Trc,
-    }
-
-    pub const LOG_LEVEL: Level = match option_env!("LOG_LEVEL") {
-        Some(val) => match val.as_bytes()[0] {
-            b'e' => Level::Err,
-            b'w' => Level::Wrn,
-            b'i' => Level::Inf,
-            b'd' => Level::Dbg,
-            b't' => Level::Trc,
-            _ => panic!("Invalid log level."),
-        },
-        None => {
-            if cfg!(debug_assertions) {
-                Level::Dbg
-            } else {
-                Level::Err
-            }
-        }
-    };
-
-    macro_rules! log {
-        ($level:expr, $fmt:literal $($expr:tt)*) => {
-            if $level <= $crate::log::LOG_LEVEL {
-                eprintln!("{:?}: {}", $level, format_args!($fmt $($expr)*));
-            }
-        };
-
-        ($level:expr, $($arg:expr),+) => {
-            if $level <= $crate::log::LOG_LEVEL {
-                $(eprintln!("[{}:{}:{}][{:?}]: {} = {:?}", line!(), column!(), file!(), $level, stringify!($arg), $arg);)*
-            }
-        };
-    }
-
-    macro_rules! err { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Err, $($arg)*) }; }
-    macro_rules! wrn { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Wrn, $($arg)*) }; }
-    macro_rules! inf { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Inf, $($arg)*) }; }
-    macro_rules! dbg { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Dbg, $($arg)*) }; }
-    macro_rules! trc { ($($arg:tt)*) => { $crate::log::log!($crate::log::Level::Trc, $($arg)*) }; }
-
-    #[allow(unused_imports)]
-    pub(crate) use {dbg, err, inf, log, trc, wrn};
-}
-
 #[inline]
 unsafe fn encode<T>(instr: T) -> (usize, [u8; instrs::MAX_SIZE]) {
     let mut buf = [0; instrs::MAX_SIZE];
@@ -1463,7 +1571,9 @@ pub fn run_compiler(
         if options.dump_asm {
             codegen.disasm(out)?;
         } else {
-            codegen.dump(out)?;
+            let mut buf = Vec::new();
+            codegen.assemble(&mut buf);
+            out.write_all(&buf)?;
         }
     }
 
diff --git a/hblang/src/parser.rs b/hblang/src/parser.rs
index 85aaa87..9f5c8a6 100644
--- a/hblang/src/parser.rs
+++ b/hblang/src/parser.rs
@@ -1,6 +1,5 @@
 use {
     crate::{
-        codegen,
         ident::{self, Ident},
         lexer::{self, Lexer, Token, TokenKind},
     },
@@ -224,7 +223,7 @@ impl<'a, 'b> Parser<'a, 'b> {
         let is_ct = token.kind == TokenKind::CtIdent;
         let name = self.lexer.slice(token.range());
 
-        if let Some(builtin) = codegen::ty::from_str(name) {
+        if let Some(builtin) = crate::ty::from_str(name) {
             return (builtin, 0);
         }
 
diff --git a/hblang/src/son.rs b/hblang/src/son.rs
index 18f4657..2519e23 100644
--- a/hblang/src/son.rs
+++ b/hblang/src/son.rs
@@ -10,12 +10,12 @@ use {
             idfl::{self},
             Expr, ExprRef, FileId, Pos,
         },
-        ty, Field, Func, Reloc, Sig, Struct, SymKey, TypedReloc, Types,
+        task, ty, Field, Func, Reloc, Sig, Struct, SymKey, TypedReloc, Types,
     },
     core::fmt,
     std::{
         cell::RefCell,
-        collections::{hash_map, BTreeMap},
+        collections::hash_map,
         fmt::{Debug, Display, Write},
         hash::{Hash as _, Hasher},
         mem::{self, MaybeUninit},
@@ -331,7 +331,7 @@ impl BitSet {
 
 type Nid = u16;
 
-mod reg {
+pub mod reg {
     pub const STACK_PTR: Reg = 254;
     pub const ZERO: Reg = 0;
@@ -1327,7 +1327,7 @@ impl ItemCtx {
     }
 
     fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) {
-        emit(&mut self.code, instr);
+        crate::emit(&mut self.code, instr);
     }
 
     fn free_loc(&mut self, loc: impl Into<Option<Loc>>) {
@@ -1337,31 +1337,11 @@ impl ItemCtx {
     }
 }
 
-fn emit(out: &mut Vec<u8>, (len, instr): (usize, [u8; instrs::MAX_SIZE])) {
-    out.extend_from_slice(&instr[..len]);
-}
-
 fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
     let value = value.to_ne_bytes();
     doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
 }
 
-mod task {
-    use super::Offset;
-
-    pub fn unpack(offset: Offset) -> Result<Offset, usize> {
-        if offset >> 31 != 0 {
-            Err((offset & !(1 << 31)) as usize)
-        } else {
-            Ok(offset)
-        }
-    }
-
-    pub fn id(index: usize) -> Offset {
-        1 << 31 | index as u32
-    }
-}
-
 struct FTask {
     file: FileId,
     id: ty::Func,
@@ -1417,83 +1397,6 @@ impl Codegen {
         self.complete_call_graph_low();
     }
 
-    fn assemble(&mut self, to: &mut Vec<u8>) {
-        emit(to, instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
-        emit(to, instrs::tx());
-        self.dump_reachable(0, to);
-        Reloc::new(0, 3, 4).apply_jump(to, self.tys.funcs[0].offset, 0);
-    }
-
-    fn dump_reachable(&mut self, from: ty::Func, to: &mut Vec<u8>) {
-        let mut frontier = vec![ty::Kind::Func(from).compress()];
-
-        while let Some(itm) = frontier.pop() {
-            match itm.expand() {
-                ty::Kind::Func(func) => {
-                    let fuc = &mut self.tys.funcs[func as usize];
-                    if task::unpack(fuc.offset).is_ok() {
-                        continue;
-                    }
-                    fuc.offset = to.len() as _;
-                    to.extend(&fuc.code);
-                    frontier.extend(fuc.relocs.iter().map(|r| r.target));
-                }
-                ty::Kind::Global(glob) => {
-                    let glb = &mut self.tys.globals[glob as usize];
-                    if task::unpack(glb.offset).is_ok() {
-                        continue;
-                    }
-                    glb.offset = to.len() as _;
-                    to.extend(&glb.data);
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        for fuc in &self.tys.funcs {
-            if task::unpack(fuc.offset).is_err() {
-                continue;
-            }
-
-            for rel in &fuc.relocs {
-                let offset = match rel.target.expand() {
-                    ty::Kind::Func(fun) => self.tys.funcs[fun as usize].offset,
-                    ty::Kind::Global(glo) => self.tys.globals[glo as usize].offset,
-                    _ => unreachable!(),
-                };
-                rel.reloc.apply_jump(to, offset, fuc.offset);
-            }
-        }
-    }
-
-    pub fn disasm(
-        &mut self,
-        mut sluce: &[u8],
-        output: &mut impl std::io::Write,
-    ) -> std::io::Result<()> {
-        use crate::DisasmItem;
-        let functions = self
-            .tys
-            .funcs
-            .iter()
-            .filter(|f| task::unpack(f.offset).is_ok())
-            .map(|f| {
-                let file = &self.files[f.file as usize];
-                let Expr::BinOp { left: &Expr::Ident { name, .. }, .. } = f.expr.get(file).unwrap()
-                else {
-                    unreachable!()
-                };
-                (f.offset, (name, f.code.len() as u32, DisasmItem::Func))
-            })
-            .chain(self.tys.globals.iter().filter(|g| task::unpack(g.offset).is_ok()).map(|g| {
-                let file = &self.files[g.file as usize];
-
-                (g.offset, (file.ident_str(g.name), self.tys.size_of(g.ty), DisasmItem::Global))
-            }))
-            .collect::<BTreeMap<_, _>>();
-        crate::disasm(&mut sluce, &functions, output, |_| {})
-    }
-
     fn make_func_reachable(&mut self, func: ty::Func) {
         let fuc = &mut self.tys.funcs[func as usize];
         if fuc.offset == u32::MAX {
@@ -2556,7 +2459,10 @@ impl Codegen {
                     1..=8 => {
                         let reg = params.next();
                         if i == index as usize - 1 {
-                            emit(&mut self.ci.code, instrs::cp(node_loc!(self, expr).reg, reg));
+                            crate::emit(
+                                &mut self.ci.code,
+                                instrs::cp(node_loc!(self, expr).reg, reg),
+                            );
                        }
                    }
                    s => todo!("{s}"),
@@ -3142,10 +3048,10 @@ mod tests {
         }
 
         let mut out = Vec::new();
-        codegen.assemble(&mut out);
+        codegen.tys.assemble(&mut out);
 
        let mut buf = Vec::<u8>::new();
-        let err = codegen.disasm(&out, &mut buf);
+        let err = codegen.tys.disasm(&out, &codegen.files, &mut buf, |_| {});
         output.push_str(String::from_utf8(buf).unwrap().as_str());
         if let Err(e) = err {
             writeln!(output, "!!! asm is invalid: {e}").unwrap();
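
Note: the new `task` module and `Types::dump_reachable` replace the old snapshot bookkeeping. An item's `offset` field now either holds its final position in the binary (bit 31 clear) or marks a pending compile task (bit 31 set, which also covers the `u32::MAX` default), and only items reachable from the entry function are copied into the output before relocations are patched. The following is a minimal standalone sketch of that scheme; `Item`, `task_id`, and `link` are illustrative names, not the crate's actual API.

// Standalone sketch of the offset encoding and the reachability walk.
const TASK_BIT: u32 = 1 << 31;

// Pack a pending-task index into an offset slot (mirrors `task::id`).
fn task_id(index: usize) -> u32 {
    TASK_BIT | index as u32
}

// An offset is "done" once bit 31 is clear (mirrors `task::is_done`).
fn is_done(offset: u32) -> bool {
    offset & TASK_BIT == 0
}

struct Item {
    offset: u32,      // u32::MAX until the item is emitted
    code: Vec<u8>,    // machine code or global data
    refs: Vec<usize>, // indices of items this one references
}

// Copy only items reachable from `entry` into `out`, assigning final offsets.
fn link(items: &mut [Item], entry: usize, out: &mut Vec<u8>) {
    let mut frontier = vec![entry];
    while let Some(i) = frontier.pop() {
        if is_done(items[i].offset) {
            continue; // already emitted
        }
        items[i].offset = out.len() as u32;
        out.extend_from_slice(&items[i].code);
        frontier.extend(items[i].refs.iter().copied());
    }
}

fn main() {
    let mut items = vec![
        Item { offset: u32::MAX, code: vec![1, 2], refs: vec![2] },
        Item { offset: u32::MAX, code: vec![3], refs: vec![] }, // unreachable, never emitted
        Item { offset: u32::MAX, code: vec![4, 5, 6], refs: vec![] },
    ];
    let mut out = Vec::new();
    link(&mut items, 0, &mut out);
    assert_eq!(out, vec![1, 2, 4, 5, 6]);
    assert!(!is_done(items[1].offset));
    println!("{} reachable bytes, example task id: {:#x}", out.len(), task_id(7));
}

In the patch itself the same walk lives in `Types::dump_reachable`, with a second pass over the emitted functions applying `Reloc::apply_jump` once every referenced function and global has a real offset; comptime-only items are simply never reached, which is how they stay out of the final binary.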