use std::{ops::Range, rc::Rc};
use crate::{
ident::{self, Ident},
instrs::{self, *},
lexer::TokenKind,
log,
parser::{self, idfl, Expr, ExprRef, FileId, Pos},
HashMap,
};
use self::reg::{RET_ADDR, STACK_PTR, ZERO};
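// NOTE: this module relies on nightly features: `macro_metavar_expr` (the
// `${index(0)}`/`${count(...)}` expressions in the macros below) and
// `if_let_guard` (the `_ if let ...` match arm in `ty::Display`).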
type Offset = u32;
type Size = u32;
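/// Stack-slot accounting. Slot ids are `NonZeroU32`s whose bit 31 marks a
/// non-owning reference; owning ids are reference counted and must be
/// returned through [`Alloc::free`], which the `Drop` impl below enforces by
/// panicking on a leaked owning id (outside of unwinding).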
mod stack {
use std::num::NonZeroU32;
use super::{Offset, Size};
#[derive(Debug, PartialEq, Eq)]
pub struct Id(NonZeroU32);
impl Id {
fn index(&self) -> usize {
(self.0.get() as usize - 1) & !(1 << 31)
}
pub fn repr(&self) -> u32 {
self.0.get()
}
pub fn as_ref(&self) -> Self {
Self(unsafe { NonZeroU32::new_unchecked(self.0.get() | 1 << 31) })
}
pub fn is_ref(&self) -> bool {
self.0.get() & (1 << 31) != 0
}
}
impl Drop for Id {
fn drop(&mut self) {
if !std::thread::panicking() && !self.is_ref() {
unreachable!("stack id leaked: {:?}", self.0);
}
}
}
#[derive(PartialEq)]
struct Meta {
size: Size,
offset: Offset,
rc: u32,
}
#[derive(Default)]
pub struct Alloc {
height: Size,
pub max_height: Size,
meta: Vec<Meta>,
}
impl Alloc {
pub fn integrity_check(&self) {
for meta in self.meta.iter() {
assert!(meta.offset <= self.max_height);
}
}
pub fn allocate(&mut self, size: Size) -> Id {
self.meta.push(Meta {
size,
offset: 0,
rc: 1,
});
self.height += size;
self.max_height = self.max_height.max(self.height);
Id(unsafe { NonZeroU32::new_unchecked(self.meta.len() as u32) })
}
pub fn free(&mut self, id: Id) {
if id.is_ref() {
return;
}
let meta = &mut self.meta[id.index()];
std::mem::forget(id);
meta.rc -= 1;
if meta.rc != 0 {
return;
}
meta.offset = self.height;
self.height -= meta.size;
}
pub fn finalize_leaked(&mut self) {
for meta in self.meta.iter_mut().filter(|m| m.rc > 0) {
meta.offset = self.height;
self.height -= meta.size;
}
}
pub fn clear(&mut self) {
self.height = 0;
self.max_height = 0;
self.meta.clear();
}
pub fn final_offset(&self, id: u32, extra_offset: Offset) -> Offset {
debug_assert_ne!(id, 0);
(self.max_height - self.meta[(id as usize - 1) & !(1 << 31)].offset) + extra_offset
}
}
}
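/// Register accounting. Registers 32..=253 are allocatable; 0 is the zero
/// register, 1 carries return values, 31 the return address, and 254 the
/// stack pointer. As with stack ids, dropping an owned `Id` without freeing
/// it panics outside of unwinding.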
mod reg {
pub const STACK_PTR: Reg = 254;
pub const ZERO: Reg = 0;
pub const RET: Reg = 1;
pub const RET_ADDR: Reg = 31;
type Reg = u8;
#[derive(Default, Debug, PartialEq, Eq)]
pub struct Id(Reg, bool);
impl Id {
pub const RET: Self = Id(RET, false);
pub fn get(&self) -> Reg {
self.0
}
pub fn as_ref(&self) -> Self {
Self(self.0, false)
}
pub fn is_ref(&self) -> bool {
!self.1
}
}
impl From<u8> for Id {
fn from(value: u8) -> Self {
Self(value, false)
}
}
impl Drop for Id {
fn drop(&mut self) {
if !std::thread::panicking() && self.1 {
unreachable!("reg id leaked: {:?}", self.0);
}
}
}
#[derive(Default, PartialEq, Eq)]
pub struct Alloc {
free: Vec<Reg>,
max_used: Reg,
}
impl Alloc {
pub fn init(&mut self) {
self.free.clear();
self.free.extend((32..=253).rev());
self.max_used = RET_ADDR;
}
pub fn allocate(&mut self) -> Id {
let reg = self.free.pop().expect("TODO: we need to spill");
self.max_used = self.max_used.max(reg);
Id(reg, true)
}
pub fn free(&mut self, reg: Id) {
if reg.1 {
self.free.push(reg.0);
std::mem::forget(reg);
}
}
pub fn pushed_size(&self) -> usize {
((self.max_used as usize).saturating_sub(RET_ADDR as usize) + 1) * 8
}
}
}
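/// Type interning. A type is a `NonZeroU32` with the kind tag packed into
/// the topmost bits (see `type_kind!`) and an index into the corresponding
/// `Types` side table in the remaining bits.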
pub mod ty {
use std::num::NonZeroU32;
use crate::{
lexer::TokenKind,
parser::{self, Expr},
};
pub type Builtin = u32;
pub type Struct = u32;
pub type Ptr = u32;
pub type Func = u32;
pub type Global = u32;
pub type Module = u32;
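/// A slice into `Types::args`, packed as `pos << LEN_BITS | len`, so a tuple
/// can hold at most `MAX_LEN - 1` elements. For example, a tuple starting at
/// position 3 with 2 elements packs as `Tuple::new(3, 2)` into
/// `(3 << 5) | 2`, and `view` then yields `&slice[3..5]`.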
#[derive(Clone, Copy)]
pub struct Tuple(pub u32);
impl Tuple {
const LEN_BITS: u32 = 5;
const MAX_LEN: usize = 1 << Self::LEN_BITS;
const LEN_MASK: usize = Self::MAX_LEN - 1;
pub fn new(pos: usize, len: usize) -> Option<Self> {
if len >= Self::MAX_LEN {
return None;
}
Some(Self((pos << Self::LEN_BITS | len) as u32))
}
pub fn view(self, slice: &[Id]) -> &[Id] {
&slice[self.0 as usize >> Self::LEN_BITS..][..self.0 as usize & Self::LEN_MASK]
}
pub fn len(self) -> usize {
self.0 as usize & Self::LEN_MASK
}
pub fn is_empty(self) -> bool {
self.0 == 0
}
pub fn empty() -> Self {
Self(0)
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct Id(NonZeroU32);
impl Default for Id {
fn default() -> Self {
Self(unsafe { NonZeroU32::new_unchecked(UNDECLARED) })
}
}
impl Id {
pub const fn from_bt(bt: u32) -> Self {
Self(unsafe { NonZeroU32::new_unchecked(bt) })
}
pub fn is_signed(self) -> bool {
(I8..=INT).contains(&self.repr())
}
pub fn is_unsigned(self) -> bool {
(U8..=UINT).contains(&self.repr())
}
pub fn strip_pointer(self) -> Self {
match self.expand() {
Kind::Ptr(_) => Id::from(INT),
_ => self,
}
}
pub fn is_pointer(self) -> bool {
matches!(Kind::from_ty(self), Kind::Ptr(_))
}
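// Upcast picks the "larger" type id (builtin ids are ordered narrow to
// wide): identical types pass through, two signed or two unsigned types
// widen, and an unsigned type may upcast to a strictly wider signed type.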
pub fn try_upcast(self, ob: Self) -> Option<Self> {
let (oa, ob) = (Self(self.0.min(ob.0)), Self(self.0.max(ob.0)));
let (a, b) = (oa.strip_pointer(), ob.strip_pointer());
Some(match () {
_ if oa == ob => oa,
_ if a.is_signed() && b.is_signed() || a.is_unsigned() && b.is_unsigned() => ob,
_ if a.is_unsigned() && b.is_signed() && a.repr() - U8 < b.repr() - I8 => ob,
_ => return None,
})
}
pub fn expand(self) -> Kind {
Kind::from_ty(self)
}
pub const fn repr(self) -> u32 {
self.0.get()
}
}
impl From<u32> for Id {
fn from(id: u32) -> Self {
Self(unsafe { NonZeroU32::new_unchecked(id) })
}
}
const fn array_to_lower_case<const N: usize>(array: [u8; N]) -> [u8; N] {
let mut result = [0; N];
let mut i = 0;
while i < N {
result[i] = array[i].to_ascii_lowercase();
i += 1;
}
result
}
// Declares the builtin type constants together with lower-cased name tables
// for converting between builtin ids and strings.
macro_rules! builtin_type {
($($name:ident;)*) => {
$(pub const $name: Builtin = ${index(0)} + 1;)*
mod __lc_names {
use super::*;
$(pub const $name: &[u8] = &array_to_lower_case(unsafe {
*(stringify!($name).as_ptr() as *const [u8; stringify!($name).len()]) });)*
}
pub fn from_str(name: &str) -> Option<Builtin> {
match name.as_bytes() {
$(__lc_names::$name => Some($name),)*
_ => None,
}
}
pub fn to_str(ty: Builtin) -> &'static str {
match ty {
$($name => unsafe { std::str::from_utf8_unchecked(__lc_names::$name) },)*
v => unreachable!("invalid type: {}", v),
}
}
};
}
builtin_type! {
UNDECLARED;
NEVER;
VOID;
TYPE;
BOOL;
U8;
U16;
U32;
UINT;
I8;
I16;
I32;
INT;
}
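// The declaration order above is load-bearing: `Id::is_signed`/`is_unsigned`
// rely on U8..=UINT and I8..=INT being contiguous ranges, and `try_upcast`
// relies on wider types having larger ids.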
macro_rules! type_kind {
($(#[$meta:meta])* $vis:vis enum $name:ident {$( $variant:ident, )*}) => {
$(#[$meta])*
$vis enum $name {
$($variant($variant),)*
}
impl $name {
const FLAG_BITS: u32 = (${count($variant)} as u32).next_power_of_two().ilog2();
const FLAG_OFFSET: u32 = std::mem::size_of::<Id>() as u32 * 8 - Self::FLAG_BITS;
const INDEX_MASK: u32 = (1 << (32 - Self::FLAG_BITS)) - 1;
$vis const fn from_ty(ty: Id) -> Self {
let (flag, index) = (ty.repr() >> Self::FLAG_OFFSET, ty.repr() & Self::INDEX_MASK);
match flag {
$(${index(0)} => Self::$variant(index),)*
_ => unreachable!(),
}
}
$vis const fn compress(self) -> Id {
let (index, flag) = match self {
$(Self::$variant(index) => (index, ${index(0)}),)*
};
Id(unsafe { NonZeroU32::new_unchecked((flag << Self::FLAG_OFFSET) | index) })
}
$vis const fn inner(self) -> u32 {
match self {
$(Self::$variant(index) => index,)*
}
}
}
};
}
type_kind! {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Kind {
Builtin,
Struct,
Ptr,
Func,
Global,
Module,
}
}
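// With the six variants above, FLAG_BITS = 3, so the kind tag lives in the
// top three bits of the 32-bit id and the index in the remaining 29.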
impl Default for Kind {
fn default() -> Self {
Self::Builtin(UNDECLARED)
}
}
pub struct Display<'a> {
tys: &'a super::Types,
files: &'a [parser::Ast],
ty: Id,
}
impl<'a> Display<'a> {
pub(super) fn new(tys: &'a super::Types, files: &'a [parser::Ast], ty: Id) -> Self {
Self { tys, files, ty }
}
fn rety(&self, ty: Id) -> Self {
Self::new(self.tys, self.files, ty)
}
}
impl<'a> std::fmt::Display for Display<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use Kind as TK;
match TK::from_ty(self.ty) {
TK::Module(idx) => write!(f, "module{}", idx),
TK::Builtin(ty) => write!(f, "{}", to_str(ty)),
TK::Ptr(ty) => {
write!(f, "^{}", self.rety(self.tys.ptrs[ty as usize].base))
}
_ if let Some((key, _)) = self
.tys
.syms
.iter()
.find(|(sym, &ty)| sym.file != u32::MAX && ty == self.ty)
&& let Some(name) = self.files[key.file as usize].exprs().iter().find_map(
|expr| match expr {
Expr::BinOp {
left: &Expr::Ident { name, id, .. },
op: TokenKind::Decl,
..
} if id == key.ident => Some(name),
_ => None,
},
) =>
{
write!(f, "{name}")
}
TK::Struct(idx) => {
let record = &self.tys.structs[idx as usize];
write!(f, "{{")?;
for (i, &super::Field { ref name, ty }) in record.fields.iter().enumerate() {
if i != 0 {
write!(f, ", ")?;
}
write!(f, "{name}: {}", self.rety(ty))?;
}
write!(f, "}}")
}
TK::Func(idx) => write!(f, "fn{idx}"),
TK::Global(idx) => write!(f, "global{idx}"),
}
}
}
}
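/// A pending patch of `width` bytes at `offset + sub_offset` in the emitted
/// code, used for jump targets and stack-slot offsets that are only known
/// after a function body is finished.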
#[derive(Clone, Copy, Debug)]
struct Reloc {
offset: Offset,
/// code_offset - sub_offset = instr_offset
sub_offset: u8,
width: u8,
}
impl Reloc {
fn new(offset: u32, sub_offset: u8, width: u8) -> Self {
Self {
offset,
sub_offset,
width,
}
}
fn apply_stack_offset(&self, code: &mut [u8], stack: &stack::Alloc) {
log::err!("faaah: {:x}", self.offset);
log::err!("{:x?} {}", &code[self.offset as usize..], self.sub_offset);
let bytes = &code[self.offset as usize..][..self.width as usize];
let (id, off) = Self::unpack_srel(u64::from_ne_bytes(bytes.try_into().unwrap()));
self.write_offset(code, stack.final_offset(id, off) as i64);
}
fn pack_srel(id: &stack::Id, off: u32) -> u64 {
((id.repr() as u64) << 32) | (off as u64)
}
fn unpack_srel(id: u64) -> (u32, u32) {
((id >> 32) as u32, id as u32)
}
fn apply_jump(&self, code: &mut [u8], to: u32) {
let offset = to as i64 - self.offset as i64;
self.write_offset(code, offset);
}
fn write_offset(&self, code: &mut [u8], offset: i64) {
let bytes = offset.to_ne_bytes();
let slice =
&mut code[self.offset as usize + self.sub_offset as usize..][..self.width as usize];
slice.copy_from_slice(&bytes[..self.width as usize]);
}
}
struct Value {
ty: ty::Id,
loc: Loc,
}
impl Value {
fn new(ty: impl Into<ty::Id>, loc: impl Into<Loc>) -> Self {
Self {
ty: ty.into(),
loc: loc.into(),
}
}
fn void() -> Self {
Self {
ty: ty::VOID.into(),
loc: Loc::imm(0),
}
}
fn imm(value: u64) -> Self {
Self {
ty: ty::UINT.into(),
loc: Loc::imm(value),
}
}
fn ty(ty: ty::Id) -> Self {
Self {
ty: ty::TYPE.into(),
loc: Loc::Ct {
value: (ty.repr() as u64).to_ne_bytes(),
},
}
}
}
enum LocCow<'a> {
Ref(&'a Loc),
Owned(Loc),
}
impl<'a> LocCow<'a> {
fn as_ref(&self) -> &Loc {
match self {
Self::Ref(value) => value,
Self::Owned(value) => value,
}
}
}
impl<'a> From<&'a Loc> for LocCow<'a> {
fn from(value: &'a Loc) -> Self {
Self::Ref(value)
}
}
impl<'a> From<Loc> for LocCow<'a> {
fn from(value: Loc) -> Self {
Self::Owned(value)
}
}
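/// Where a value lives: `Rt` is a runtime location (a register, optionally
/// dereferenced at `offset`, optionally backed by a stack slot), `Ct` a
/// compile-time constant encoded as 8 bytes.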
#[derive(Debug, PartialEq, Eq)]
pub enum Loc {
Rt {
derefed: bool,
reg: reg::Id,
stack: Option<stack::Id>,
offset: Offset,
},
Ct {
value: [u8; 8],
},
}
impl Loc {
fn stack(stack: stack::Id) -> Self {
Self::Rt {
stack: Some(stack),
reg: reg::STACK_PTR.into(),
derefed: true,
offset: 0,
}
}
fn reg(reg: impl Into<reg::Id>) -> Self {
let reg = reg.into();
assert!(reg.get() != 0);
Self::Rt {
derefed: false,
reg,
stack: None,
offset: 0,
}
}
fn imm(value: u64) -> Self {
Self::Ct {
value: value.to_ne_bytes(),
}
}
fn ty(ty: ty::Id) -> Self {
Self::imm(ty.repr() as _)
}
fn offset(mut self, offset: u32) -> Self {
match &mut self {
Self::Rt { offset: off, .. } => *off += offset,
_ => unreachable!("offseting constant"),
}
self
}
fn as_ref(&self) -> Self {
match *self {
Loc::Rt {
derefed,
ref reg,
ref stack,
offset,
} => Loc::Rt {
derefed,
reg: reg.as_ref(),
stack: stack.as_ref().map(stack::Id::as_ref),
offset,
},
Loc::Ct { value } => Self::Ct { value },
}
}
fn into_derefed(mut self) -> Self {
match &mut self {
Self::Rt { derefed, .. } => *derefed = true,
_ => unreachable!(),
}
self
}
fn assert_valid(&self) {
assert!(!matches!(self, Self::Rt { reg, .. } if reg.get() == 0));
}
fn take_owned(&mut self) -> Option<Self> {
if self.is_ref() {
return None;
}
Some(std::mem::replace(self, self.as_ref()))
}
fn is_ref(&self) -> bool {
matches!(self, Self::Rt { reg, stack, .. } if reg.is_ref() && stack.as_ref().map_or(true, stack::Id::is_ref))
}
}
impl From<reg::Id> for Loc {
fn from(reg: reg::Id) -> Self {
Loc::reg(reg)
}
}
impl Default for Loc {
fn default() -> Self {
Self::Ct { value: [0; 8] }
}
}
struct Loop {
var_count: u32,
offset: u32,
reloc_base: u32,
}
struct Variable {
id: Ident,
value: Value,
}
#[derive(Default)]
struct ItemCtx {
file: FileId,
id: ty::Kind,
ret: ty::Id,
ret_reg: reg::Id,
task_base: usize,
snap: Snapshot,
stack: stack::Alloc,
regs: reg::Alloc,
stack_relocs: Vec<Reloc>,
ret_relocs: Vec<Reloc>,
loop_relocs: Vec<Reloc>,
loops: Vec<Loop>,
vars: Vec<Variable>,
}
impl ItemCtx {
// pub fn dup_loc(&mut self, loc: &Loc) -> Loc {
// match *loc {
// Loc::Rt {
// derefed,
// ref reg,
// ref stack,
// offset,
// } => Loc::Rt {
// reg: reg.as_ref(),
// derefed,
// stack: stack.as_ref().map(|s| self.stack.dup_id(s)),
// offset,
// },
// Loc::Ct { value } => Loc::Ct { value },
// }
// }
fn finalize(&mut self, output: &mut Output) {
self.stack.finalize_leaked();
for rel in self.stack_relocs.drain(..) {
rel.apply_stack_offset(&mut output.code[self.snap.code..], &self.stack)
}
let ret_offset = output.code.len() - self.snap.code;
for rel in self.ret_relocs.drain(..) {
rel.apply_jump(&mut output.code[self.snap.code..], ret_offset as _);
}
self.finalize_frame(output);
self.stack.clear();
debug_assert!(self.loops.is_empty());
debug_assert!(self.loop_relocs.is_empty());
debug_assert!(self.vars.is_empty());
}
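// Backpatches the placeholder prelude emitted by `Output::emit_prelude` (an
// `addi64` reserving the frame and an `st` spilling the saved registers)
// now that the frame height and pushed-register count are known, then emits
// the matching epilogue. The offsets fed to `allocate` (3, 8 + 3, 8) step
// over the opcode and register bytes of those two instructions.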
fn finalize_frame(&mut self, output: &mut Output) {
let mut cursor = self.snap.code;
let mut allocate = |size| (cursor += size, cursor).1;
let pushed = self.regs.pushed_size() as i64;
let stack = self.stack.max_height as i64;
write_reloc(&mut output.code, allocate(3), -(pushed + stack), 8);
write_reloc(&mut output.code, allocate(8 + 3), stack, 8);
write_reloc(&mut output.code, allocate(8), pushed, 2);
output.emit(ld(RET_ADDR, STACK_PTR, stack as _, pushed as _));
output.emit(addi64(STACK_PTR, STACK_PTR, (pushed + stack) as _));
}
fn free_loc(&mut self, src: impl Into<LocCow<'_>>) {
if let LocCow::Owned(Loc::Rt { reg, stack, .. }) = src.into() {
self.regs.free(reg);
if let Some(stack) = stack {
self.stack.free(stack);
}
}
}
}
fn write_reloc(code: &mut [u8], offset: usize, value: i64, size: u16) {
let value = value.to_ne_bytes();
code[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}
#[derive(PartialEq, Eq, Hash)]
struct SymKey {
file: u32,
ident: u32,
}
impl SymKey {
pub fn pointer_to(ty: ty::Id) -> Self {
Self {
file: u32::MAX,
ident: ty.repr(),
}
}
}
#[derive(Clone, Copy)]
struct Func {
// if the most significant bit is 1, it's considered to be a task id
offset: Offset,
args: ty::Tuple,
ret: ty::Id,
}
struct Global {
offset: Offset,
ty: ty::Id,
}
struct Field {
name: Rc<str>,
ty: ty::Id,
}
struct Struct {
fields: Rc<[Field]>,
}
struct Ptr {
base: ty::Id,
}
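/// Hands out argument registers from a sub-range of 2..12 (starting one
/// register later when the return value occupies 9..=16 bytes; see
/// `Types::parama`). `next_wide` consumes two consecutive registers and
/// returns the first, for values passed as a register pair.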
struct ParamAlloc(Range<u8>);
impl ParamAlloc {
pub fn next(&mut self) -> u8 {
self.0.next().expect("too many paramteters")
}
fn next_wide(&mut self) -> u8 {
(self.next(), self.next()).0
}
}
#[derive(Default)]
struct Types {
syms: HashMap<SymKey, ty::Id>,
funcs: Vec<Func>,
args: Vec<ty::Id>,
globals: Vec<Global>,
structs: Vec<Struct>,
ptrs: Vec<Ptr>,
}
impl Types {
fn parama(&self, ret: impl Into<ty::Id>) -> ParamAlloc {
ParamAlloc(2 + (9..=16).contains(&self.size_of(ret.into())) as u8..12)
}
fn offset_of(&self, idx: ty::Struct, field: Result<&str, usize>) -> Option<(Offset, ty::Id)> {
let record = &self.structs[idx as usize];
let until = match field {
Ok(str) => record.fields.iter().position(|f| f.name.as_ref() == str)?,
Err(i) => i,
};
let mut offset = 0;
for &Field { ty, .. } in &record.fields[..until] {
offset = Self::align_up(offset, self.align_of(ty));
offset += self.size_of(ty);
}
Some((offset, record.fields[until].ty))
}
fn make_ptr(&mut self, base: ty::Id) -> ty::Id {
ty::Kind::Ptr(self.make_ptr_low(base)).compress()
}
fn make_ptr_low(&mut self, base: ty::Id) -> ty::Ptr {
let id = SymKey::pointer_to(base);
self.syms
.entry(id)
.or_insert_with(|| {
self.ptrs.push(Ptr { base });
ty::Kind::Ptr(self.ptrs.len() as u32 - 1).compress()
})
.expand()
.inner()
}
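// Standard power-of-two rounding, e.g. align_up(5, 4) == 8 and
// align_up(8, 4) == 8; `align` must be a power of two for the mask trick
// to work.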
fn align_up(value: Size, align: Size) -> Size {
(value + align - 1) & !(align - 1)
}
fn size_of(&self, ty: ty::Id) -> Size {
match ty.expand() {
ty::Kind::Ptr(_) => 8,
ty::Kind::Builtin(ty::VOID) => 0,
ty::Kind::Builtin(ty::NEVER) => unreachable!(),
ty::Kind::Builtin(ty::INT | ty::UINT) => 8,
ty::Kind::Builtin(ty::I32 | ty::U32 | ty::TYPE) => 4,
ty::Kind::Builtin(ty::I16 | ty::U16) => 2,
ty::Kind::Builtin(ty::I8 | ty::U8 | ty::BOOL) => 1,
ty::Kind::Struct(ty) => {
let mut offset = 0u32;
let record = &self.structs[ty as usize];
for &Field { ty, .. } in record.fields.iter() {
let align = self.align_of(ty);
offset = Self::align_up(offset, align);
offset += self.size_of(ty);
}
offset
}
ty => unimplemented!("size_of: {:?}", ty),
}
}
fn align_of(&self, ty: ty::Id) -> Size {
match ty.expand() {
ty::Kind::Struct(t) => self.structs[t as usize]
.fields
.iter()
.map(|&Field { ty, .. }| self.align_of(ty))
.max()
.unwrap(),
_ => self.size_of(ty).max(1),
}
}
}
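/// A function's `offset` doubles as a task id while its body is still queued
/// for compilation: bit 31 tags the value as an index into the pending-task
/// list rather than a code offset.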
mod task {
use super::Offset;
pub fn unpack(offset: Offset) -> Result<Offset, usize> {
if offset >> 31 != 0 {
Err((offset & !(1 << 31)) as usize)
} else {
Ok(offset)
}
}
pub fn id(index: usize) -> Offset {
1 << 31 | index as u32
}
}
struct FTask {
file: FileId,
expr: ExprRef,
id: ty::Func,
}
#[derive(Default, Clone, Copy)]
pub struct Snapshot {
code: usize,
funcs: usize,
globals: usize,
}
#[derive(Default)]
struct Output {
code: Vec<u8>,
funcs: Vec<(ty::Func, Reloc)>,
globals: Vec<(ty::Global, Reloc)>,
}
impl Output {
fn emit_addi(&mut self, dest: u8, op: u8, delta: u64) {
if delta == 0 {
if dest != op {
self.emit(cp(dest, op));
}
return;
}
self.emit(addi64(dest, op, delta));
}
fn emit(&mut self, (len, instr): (usize, [u8; instrs::MAX_SIZE])) {
let name = instrs::NAMES[instr[0] as usize];
log::dbg!(
"{:08x}: {}: {}",
self.code.len(),
name,
instr
.iter()
.take(len)
.skip(1)
.map(|b| format!("{:02x}", b))
.collect::<String>()
);
self.code.extend_from_slice(&instr[..len]);
}
fn emit_prelude(&mut self) {
self.emit(instrs::addi64(STACK_PTR, STACK_PTR, 0));
self.emit(instrs::st(RET_ADDR, STACK_PTR, 0, 0));
}
fn emit_entry_prelude(&mut self) {
self.emit(jal(RET_ADDR, reg::ZERO, 0));
self.emit(tx());
}
fn append(&mut self, val: &mut Self) {
for (_, rel) in val.globals.iter_mut().chain(&mut val.funcs) {
rel.offset += self.code.len() as Offset;
}
self.code.append(&mut val.code);
self.funcs.append(&mut val.funcs);
self.globals.append(&mut val.globals);
}
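// Moves everything emitted since `snap` into `stash`, rebasing the
// relocations so the stashed code can later be re-appended at a different
// position via `append`.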
fn pop(&mut self, stash: &mut Self, snap: &Snapshot) {
for (_, rel) in self.globals[snap.globals..]
.iter_mut()
.chain(&mut self.funcs[snap.funcs..])
{
rel.offset -= snap.code as Offset;
rel.offset += stash.code.len() as Offset;
}
stash.code.extend(self.code.drain(snap.code..));
stash.funcs.extend(self.funcs.drain(snap.funcs..));
stash.globals.extend(self.globals.drain(snap.globals..));
}
fn trunc(&mut self, snap: &Snapshot) {
self.code.truncate(snap.code);
self.globals.truncate(snap.globals);
self.funcs.truncate(snap.funcs);
}
fn write_trap(&mut self, trap: Trap) {
let len = self.code.len();
self.code.resize(len + std::mem::size_of::<Trap>(), 0);
unsafe { std::ptr::write_unaligned(self.code.as_mut_ptr().add(len) as _, trap) }
}
fn snap(&mut self) -> Snapshot {
Snapshot {
code: self.code.len(),
funcs: self.funcs.len(),
globals: self.globals.len(),
}
}
fn emit_call(&mut self, func_id: ty::Func) {
let reloc = Reloc::new(self.code.len() as _, 3, 4);
self.funcs.push((func_id, reloc));
self.emit(jal(RET_ADDR, ZERO, 0));
}
}
#[derive(Default, Debug)]
struct Ctx {
loc: Option<Loc>,
ty: Option<ty::Id>,
}
impl Ctx {
pub fn with_loc(self, loc: Loc) -> Self {
Self {
loc: Some(loc),
..self
}
}
pub fn with_ty(self, ty: impl Into<ty::Id>) -> Self {
Self {
ty: Some(ty.into()),
..self
}
}
fn into_value(self) -> Option {
Some(Value {
ty: self.ty.unwrap(),
loc: self.loc?,
})
}
}
impl From<Value> for Ctx {
fn from(value: Value) -> Self {
Self {
loc: Some(value.loc),
ty: Some(value.ty),
}
}
}
#[derive(Default)]
struct Pool {
cis: Vec<ItemCtx>,
outputs: Vec<Output>