Jakub Doka 2024-11-07 08:52:41 +01:00
parent 79a3f1ab2b
commit 2bac7c1fb3
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
9 changed files with 2010 additions and 2561 deletions

.gitignore vendored (2 changes)
View file

@@ -9,5 +9,5 @@ db.sqlite-journal
# assets
/depell/src/*.gz
/depell/src/*.wasm
-**/*-sv.rs
+#**/*-sv.rs
/bytecode/src/instrs.rs

Cargo.lock generated (1347 changes)

File diff suppressed because it is too large

View file

@@ -8,10 +8,10 @@ members = [
"xrt",
"xtask",
"lang",
-"depell",
-"depell/wasm-fmt",
-"depell/wasm-hbc",
-"depell/wasm-rt",
+#"depell",
+#"depell/wasm-fmt",
+#"depell/wasm-hbc",
+#"depell/wasm-rt",
]
[workspace.dependencies]

View file

@@ -1,150 +1,2 @@
use {crate::reg::Reg, alloc::vec::Vec, core::ops::Range};
type Nid = u16;
pub trait Ctx {
fn uses_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn params_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn args_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn dom_of(&self, nid: Nid) -> Nid;
}
pub struct Env<'a, C: Ctx> {
ctx: &'a C,
func: &'a Func,
res: &'a mut Res,
}
impl<'a, C: Ctx> Env<'a, C> {
pub fn new(ctx: &'a C, func: &'a Func, res: &'a mut Res) -> Self {
Self { ctx, func, res }
}
pub fn run(&mut self) {
self.res.reg_to_node.clear();
self.res.reg_to_node.resize(self.func.instrs.len(), 0);
let mut bundle = Bundle::new(self.func.instrs.len());
for &inst in &self.func.instrs {
for uinst in self.ctx.uses_of(inst) {
let mut cursor = self.ctx.dom_of(uinst);
while cursor != self.ctx.dom_of(inst) {
let mut range = self.func.blocks
[self.func.id_to_block[cursor as usize] as usize]
.range
.clone();
range.start = range.start.max(inst as usize);
range.end = range.end.min(uinst as usize);
bundle.add(range);
cursor = self.ctx.dom_of(cursor);
}
}
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(&bundle)) {
Some((i, other)) => {
other.merge(&bundle);
bundle.clear();
self.res.reg_to_node[inst as usize] = i as Reg;
}
None => {
self.res.reg_to_node[inst as usize] = self.res.bundles.len() as Reg;
self.res.bundles.push(bundle);
bundle = Bundle::new(self.func.instrs.len());
}
}
}
}
}
pub struct Res {
bundles: Vec<Bundle>,
pub reg_to_node: Vec<Reg>,
}
pub struct Bundle {
//unit_range: Range<usize>,
//set: BitSet,
taken: Vec<bool>,
}
impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) {
self.taken[range].fill(true);
}
fn overlaps(&self, other: &Self) -> bool {
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
}
fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(other));
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a = *b);
}
fn clear(&mut self) {
self.taken.fill(false);
}
//fn overlaps(&self, other: &Self) -> bool {
// if self.unit_range.start >= other.unit_range.end
// || self.unit_range.end <= other.unit_range.start
// {
// return false;
// }
// let [mut a, mut b] = [self, other];
// if a.unit_range.start > b.unit_range.start {
// mem::swap(&mut a, &mut b);
// }
// let [mut tmp_a, mut tmp_b] = [0; 2];
// let [units_a, units_b] = [a.set.units(&mut tmp_a), b.set.units(&mut tmp_b)];
// let len = a.unit_range.len().min(b.unit_range.len());
// let [units_a, units_b] =
// [&units_a[b.unit_range.start - a.unit_range.start..][..len], &units_b[..len]];
// units_a.iter().zip(units_b).any(|(&a, &b)| a & b != 0)
//}
//fn merge(mut self, mut other: Self) -> Self {
// debug_assert!(!self.overlaps(&other));
// if self.unit_range.start > other.unit_range.start {
// mem::swap(&mut self, &mut other);
// }
// let final_range = self.unit_range.start..self.unit_range.end.max(other.unit_range.end);
// self.set.reserve(final_range.len());
// let mut tmp = 0;
// let other_units = other.set.units(&mut tmp);
// match self.set.units_mut() {
// Ok(units) => {
// units[other.unit_range.start - self.unit_range.start..]
// .iter_mut()
// .zip(other_units)
// .for_each(|(a, b)| *a |= b);
// }
// Err(view) => view.add_mask(tmp),
// }
// self
//}
}
pub struct Func {
pub blocks: Vec<Block>,
pub instrs: Vec<Nid>,
pub id_to_instr: Vec<Nid>,
pub id_to_block: Vec<Nid>,
}
pub struct Block {
pub range: Range<usize>,
pub start_id: Nid,
pub eld_id: Nid,
}
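
The allocator prototype removed above, and its replacement later in this commit, both rest on the same primitive: a Bundle is an occupancy mask over the linearised instruction stream, and two values may share a register exactly when their bundles do not overlap. The following is a minimal, self-contained sketch of that idea; the names and the driver in main are illustrative only, not the project's API.

use std::ops::Range;

/// One register's occupancy over the linearised instruction stream.
struct Bundle {
    taken: Vec<bool>,
}

impl Bundle {
    fn new(len: usize) -> Self {
        Self { taken: vec![false; len] }
    }
    /// Mark the instructions covered by a live range as occupied.
    fn add(&mut self, range: Range<usize>) {
        self.taken[range].fill(true);
    }
    /// Two bundles conflict if any instruction slot is occupied in both.
    fn overlaps(&self, other: &Self) -> bool {
        self.taken.iter().zip(&other.taken).any(|(a, b)| *a && *b)
    }
    /// Fold another, non-overlapping bundle into this one.
    fn merge(&mut self, other: &Self) {
        debug_assert!(!self.overlaps(other));
        self.taken.iter_mut().zip(&other.taken).for_each(|(a, b)| *a |= *b);
    }
}

fn main() {
    // Three live ranges over a six-instruction block: %0 and %2 do not
    // overlap, so they can share one bundle (register); %1 conflicts with both.
    let ranges = [0..3usize, 1..5, 4..6];
    let mut bundles: Vec<Bundle> = Vec::new();
    let mut reg_of = vec![0usize; ranges.len()];

    for (value, range) in ranges.iter().enumerate() {
        let mut live = Bundle::new(6);
        live.add(range.clone());
        match bundles.iter_mut().position(|b| !b.overlaps(&live)) {
            Some(i) => {
                bundles[i].merge(&live);
                reg_of[value] = i;
            }
            None => {
                reg_of[value] = bundles.len();
                bundles.push(live);
            }
        }
    }
    assert_eq!(reg_of, [0, 1, 0]); // %0 and %2 end up sharing register 0
}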

View file

@@ -44,7 +44,7 @@ type Nid = u16;
type AClassId = i16;
pub struct AssemblySpec {
-entry: u64,
+entry: u32,
code_length: u64,
data_length: u64,
}
@@ -118,16 +118,16 @@ impl Default for Nodes {
}
impl Nodes {
-fn loop_depth(&mut self, target: Nid) -> LoopDepth {
-if self[target].loop_depth != 0 {
-return self[target].loop_depth;
+fn loop_depth(&self, target: Nid) -> LoopDepth {
+if self[target].loop_depth.get() != 0 {
+return self[target].loop_depth.get();
}
-self[target].loop_depth = match self[target].kind {
+self[target].loop_depth.set(match self[target].kind {
Kind::Entry | Kind::Then | Kind::Else | Kind::Call { .. } | Kind::Return | Kind::If => {
let dpth = self.loop_depth(self[target].inputs[0]);
-if self[target].loop_depth != 0 {
-return self[target].loop_depth;
+if self[target].loop_depth.get() != 0 {
+return self[target].loop_depth.get();
}
dpth
}
@@ -139,10 +139,10 @@ impl Nodes {
}
Kind::Loop => {
let depth = Self::loop_depth(self, self[target].inputs[0]) + 1;
-self[target].loop_depth = depth;
+self[target].loop_depth.set(depth);
let mut cursor = self[target].inputs[1];
while cursor != target {
-self[cursor].loop_depth = depth;
+self[cursor].loop_depth.set(depth);
let next = if self[cursor].kind == Kind::Region {
self.loop_depth(self[cursor].inputs[0]);
self[cursor].inputs[1]
@@ -156,8 +156,8 @@ impl Nodes {
.iter()
.find(|&&n| self[n].kind != self[cursor].kind)
.unwrap();
-if self[other].loop_depth == 0 {
-self[other].loop_depth = depth - 1;
+if self[other].loop_depth.get() == 0 {
+self[other].loop_depth.set(depth - 1);
}
}
cursor = next;
@@ -166,9 +166,9 @@ impl Nodes {
}
Kind::Start | Kind::End | Kind::Die => 1,
u => unreachable!("{u:?}"),
-};
+});
-self[target].loop_depth
+self[target].loop_depth.get()
}
fn idepth(&self, target: Nid) -> IDomDepth {
@@ -444,7 +444,7 @@ impl Nodes {
self[to].inputs.push(from);
}
-fn use_block(&mut self, target: Nid, from: Nid) -> Nid {
+fn use_block(&self, target: Nid, from: Nid) -> Nid {
if self[from].kind != Kind::Phi {
return self.idom(from);
}
@@ -1428,7 +1428,7 @@ impl Nodes {
out,
"region{node}: {} {} {:?}",
self[node].depth.get(),
-self[node].loop_depth,
+self[node].loop_depth.get(),
self[node].inputs
)?;
let mut cfg_index = Nid::MAX;
@@ -1445,7 +1445,7 @@ impl Nodes {
out,
"loop{node}: {} {} {:?}",
self[node].depth.get(),
-self[node].loop_depth,
+self[node].loop_depth.get(),
self[node].outputs
)?;
let mut cfg_index = Nid::MAX;
@@ -1465,7 +1465,7 @@ impl Nodes {
out,
"b{node}: {} {} {:?}",
self[node].depth.get(),
-self[node].loop_depth,
+self[node].loop_depth.get(),
self[node].outputs
)?;
let mut cfg_index = Nid::MAX;
@@ -1621,6 +1621,15 @@ impl Nodes {
dominated = self.idom(dominated);
}
}
+fn is_data_dep(&self, nid: Nid, n: Nid) -> bool {
+match self[n].kind {
+_ if self.is_cfg(n) && !matches!(self[n].kind, Kind::Call { .. }) => false,
+Kind::Stre => self[n].inputs[3] != nid,
+Kind::Load => self[n].inputs[2] != nid,
+_ => self[n].inputs[0] != nid || self[n].inputs[1..].contains(&nid),
+}
+}
}
enum CondOptRes {
@@ -1737,6 +1746,10 @@ impl Kind {
matches!(self, Self::Return | Self::If | Self::End | Self::Die)
}
+fn starts_basic_block(&self) -> bool {
+matches!(self, Self::Region | Self::Loop | Self::Start | Kind::Then | Kind::Else)
+}
fn is_peeped(&self) -> bool {
!matches!(self, Self::End | Self::Arg | Self::Mem | Self::Loops)
}
@@ -1766,7 +1779,7 @@ pub struct Node {
ty: ty::Id,
depth: Cell<IDomDepth>,
lock_rc: LockRc,
-loop_depth: LoopDepth,
+loop_depth: Cell<LoopDepth>,
aclass: AClassId,
antidep: Nid,
}
@@ -1793,9 +1806,12 @@ impl Node {
fn is_mem(&self) -> bool {
matches!(self.kind, Kind::Stre | Kind::Load | Kind::Stck)
}
+fn is_data_phi(&self) -> bool {
+self.kind == Kind::Phi && self.ty != ty::Id::VOID
+}
}
type RallocBRef = u16;
type LoopDepth = u16;
type LockRc = u16;
type IDomDepth = u16;
@@ -2272,8 +2288,7 @@ impl<'a> Codegen<'a> {
// TODO: return them back
-let entry =
-self.ct_backend.assemble_reachable(fuc, self.tys, &mut self.ct.code).entry as u32;
+let entry = self.ct_backend.assemble_reachable(fuc, self.tys, &mut self.ct.code).entry;
#[cfg(debug_assertions)]
{
@@ -4516,10 +4531,9 @@ mod tests {
let err = codegen.disasm(output, &out);
if let Err(e) = err {
writeln!(output, "!!! asm is invalid: {e}").unwrap();
-return;
+} else {
+super::hbvm::test_run_vm(&out, output);
}
-super::hbvm::test_run_vm(&out, output);
}
crate::run_tests! { generate:
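
Several of the hunks above exist only to turn Node::loop_depth from a plain u16 into Cell<LoopDepth>, so the memoising loop_depth() query can be called through &self; the new backend in this commit queries it while holding shared borrows of Nodes. A tiny self-contained sketch of that Cell-backed memoisation pattern follows, with illustrative names only.

use std::cell::Cell;

struct Node {
    parent: Option<usize>,
    // Cached depth; 0 means "not computed yet". Cell lets the cache be
    // written through a shared reference.
    depth: Cell<u16>,
}

struct Nodes(Vec<Node>);

impl Nodes {
    /// Memoised depth query that only needs `&self`.
    fn depth(&self, id: usize) -> u16 {
        if self.0[id].depth.get() != 0 {
            return self.0[id].depth.get();
        }
        let d = match self.0[id].parent {
            Some(p) => self.depth(p) + 1,
            None => 1,
        };
        self.0[id].depth.set(d);
        d
    }
}

fn main() {
    let nodes = Nodes(vec![
        Node { parent: None, depth: Cell::new(0) },
        Node { parent: Some(0), depth: Cell::new(0) },
        Node { parent: Some(1), depth: Cell::new(0) },
    ]);
    assert_eq!(nodes.depth(2), 3); // computed once, then served from the Cells
}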

File diff suppressed because it is too large

View file

@@ -0,0 +1,907 @@
use {
super::{HbvmBackend, Nid, Nodes},
crate::{
lexer::TokenKind,
parser,
reg::{self, Reg},
son::{debug_assert_matches, Kind, ARG_START, MEM, VOID},
ty::{self, Arg, Loc},
utils::{BitSet, Vc},
Offset, PLoc, Reloc, Sig, TypedReloc, Types,
},
alloc::{borrow::ToOwned, vec::Vec},
core::{mem, ops::Range},
hbbytecode::{self as instrs},
};
impl HbvmBackend {
pub fn emit_body_code_my(
&mut self,
nodes: &mut Nodes,
sig: Sig,
tys: &Types,
files: &[parser::Ast],
) -> (usize, bool) {
let mut fuc = Function::new(nodes, tys, sig);
log::info!("{fuc:?}");
let mut res = mem::take(&mut self.ralloc_my);
Env::new(&fuc, &fuc.func, &mut res).run();
'_open_function: {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
self.emit(instrs::st(reg::RET_ADDR + fuc.tail as u8, reg::STACK_PTR, 0, 0));
}
let reg_offset = if fuc.tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
*r += reg_offset - 1;
if fuc.tail && *r >= reg::RET_ADDR {
*r += 1;
}
});
let atr = |allc: Nid| res.node_to_reg[allc as usize];
//for (id, node) in fuc.nodes.iter() {
// if node.kind == Kind::Phi {
// debug_assert_eq!(atr(node.inputs[1]), atr(node.inputs[2]));
// debug_assert_eq!(atr(id), atr(node.inputs[2]));
// }
//}
let (retl, mut parama) = tys.parama(sig.ret);
let mut typs = sig.args.args();
let mut args = fuc.nodes[VOID].outputs[ARG_START..].iter();
while let Some(aty) = typs.next(tys) {
let Arg::Value(ty) = aty else { continue };
let Some(loc) = parama.next(ty, tys) else { continue };
let &arg = args.next().unwrap();
let (rg, size) = match loc {
PLoc::WideReg(rg, size) => (rg, size),
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
self.emit(instrs::cp(atr(arg), r));
continue;
}
};
self.emit(instrs::st(rg, reg::STACK_PTR, self.offsets[arg as usize] as _, size));
if fuc.nodes[arg].lock_rc == 0 {
self.emit(instrs::addi64(rg, reg::STACK_PTR, self.offsets[arg as usize] as _));
}
self.emit(instrs::cp(atr(arg), rg));
}
for (i, block) in fuc.func.blocks.iter().enumerate() {
self.offsets[block.entry as usize] = self.code.len() as _;
for &nid in &fuc.func.instrs[block.range.clone()] {
if nid == VOID {
continue;
}
let node = &fuc.nodes[nid];
let extend = |base: ty::Id, dest: ty::Id, from: Nid, to: Nid| {
let (bsize, dsize) = (tys.size_of(base), tys.size_of(dest));
debug_assert!(bsize <= 8, "{}", ty::Display::new(tys, files, base));
debug_assert!(dsize <= 8, "{}", ty::Display::new(tys, files, dest));
if bsize == dsize {
return Default::default();
}
match (base.is_signed(), dest.is_signed()) {
(true, true) => {
let op = [instrs::sxt8, instrs::sxt16, instrs::sxt32]
[bsize.ilog2() as usize];
op(atr(to), atr(from))
}
_ => {
let mask = (1u64 << (bsize * 8)) - 1;
instrs::andi(atr(to), atr(from), mask)
}
}
};
match node.kind {
Kind::If => {
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
if let Kind::BinOp { op } = fuc.nodes[cnd].kind
&& let Some((op, swapped)) =
op.cond_op(fuc.nodes[fuc.nodes[cnd].inputs[1]].ty)
{
let &[_, lhs, rhs] = fuc.nodes[cnd].inputs.as_slice() else {
unreachable!()
};
self.emit(extend(fuc.nodes[lhs].ty, fuc.nodes[lhs].ty.extend(), 0, 0));
self.emit(extend(fuc.nodes[rhs].ty, fuc.nodes[rhs].ty.extend(), 1, 1));
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[!swapped as usize], rel));
self.emit(op(atr(lhs), atr(rhs), 0));
} else {
self.emit(extend(fuc.nodes[cnd].ty, fuc.nodes[cnd].ty.extend(), 0, 0));
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[0], rel));
self.emit(instrs::jne(atr(cnd), reg::ZERO, 0));
}
}
Kind::Loop | Kind::Region => {
if (mem::replace(&mut fuc.backrefs[nid as usize], u16::MAX) != u16::MAX)
^ (node.kind == Kind::Loop)
{
let index = (node.kind == Kind::Loop) as usize + 1;
for &out in node.outputs.iter() {
if fuc.nodes[out].is_data_phi()
&& atr(out) != atr(fuc.nodes[out].inputs[index])
{
self.emit(instrs::cp(
atr(out),
atr(fuc.nodes[out].inputs[index]),
));
}
}
let rel = Reloc::new(self.code.len(), 1, 4);
self.jump_relocs.push((nid, rel));
self.emit(instrs::jmp(0));
} else {
let index = (node.kind != Kind::Loop) as usize + 1;
for &out in node.outputs.iter() {
if fuc.nodes[out].is_data_phi()
&& atr(out) != atr(fuc.nodes[out].inputs[index])
{
self.emit(instrs::cp(
atr(out),
atr(fuc.nodes[out].inputs[index]),
));
}
}
}
}
Kind::Return => {
let &[_, mut ret, ..] = node.inputs.as_slice() else { unreachable!() };
match retl {
None => {}
Some(PLoc::Reg(r, _)) if sig.ret.loc(tys) == Loc::Reg => {
self.emit(instrs::cp(r, atr(ret)));
}
Some(PLoc::Reg(r, size)) | Some(PLoc::WideReg(r, size)) => {
ret = match fuc.nodes[ret].kind {
Kind::Load { .. } => fuc.nodes[ret].inputs[1],
_ => ret,
};
self.emit(instrs::ld(r, atr(ret), 0, size))
}
Some(PLoc::Ref(_, size)) => {
ret = match fuc.nodes[ret].kind {
Kind::Load { .. } => fuc.nodes[ret].inputs[1],
_ => ret,
};
let [src, dst] = [atr(ret), atr(MEM)];
if let Ok(size) = u16::try_from(size) {
self.emit(instrs::bmc(src, dst, size));
} else {
for _ in 0..size / u16::MAX as u32 {
self.emit(instrs::bmc(src, dst, u16::MAX));
self.emit(instrs::addi64(src, src, u16::MAX as _));
self.emit(instrs::addi64(dst, dst, u16::MAX as _));
}
self.emit(instrs::bmc(src, dst, size as u16));
self.emit(instrs::addi64(src, src, size.wrapping_neg() as _));
self.emit(instrs::addi64(dst, dst, size.wrapping_neg() as _));
}
}
}
if i != fuc.func.blocks.len() - 1 {
let rel = Reloc::new(self.code.len(), 1, 4);
self.ret_relocs.push(rel);
self.emit(instrs::jmp(0));
}
}
Kind::Die => self.emit(instrs::un()),
Kind::CInt { value } if node.ty.is_float() => {
self.emit(match node.ty {
ty::Id::F32 => instrs::li32(
atr(nid),
(f64::from_bits(value as _) as f32).to_bits(),
),
ty::Id::F64 => instrs::li64(atr(nid), value as _),
_ => unreachable!(),
});
}
Kind::CInt { value } => self.emit(match tys.size_of(node.ty) {
1 => instrs::li8(atr(nid), value as _),
2 => instrs::li16(atr(nid), value as _),
4 => instrs::li32(atr(nid), value as _),
_ => instrs::li64(atr(nid), value as _),
}),
Kind::UnOp { op } => {
let op = op
.unop(node.ty, fuc.nodes[node.inputs[1]].ty)
.expect("TODO: unary operator not supported");
self.emit(op(atr(nid), atr(node.inputs[1])));
}
Kind::BinOp { .. } if node.lock_rc != 0 => {}
Kind::BinOp { op } => {
let &[.., lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
if let Kind::CInt { value } = fuc.nodes[rhs].kind
&& fuc.nodes[rhs].lock_rc != 0
&& let Some(op) = op.imm_binop(node.ty)
{
self.emit(op(atr(nid), atr(lhs), value as _));
} else if let Some(op) =
op.binop(node.ty).or(op.float_cmp(fuc.nodes[lhs].ty))
{
self.emit(op(atr(nid), atr(lhs), atr(rhs)));
} else if let Some(against) = op.cmp_against() {
let op_ty = fuc.nodes[lhs].ty;
self.emit(extend(fuc.nodes[lhs].ty, fuc.nodes[lhs].ty.extend(), 0, 0));
self.emit(extend(fuc.nodes[rhs].ty, fuc.nodes[rhs].ty.extend(), 1, 1));
if op_ty.is_float() && matches!(op, TokenKind::Le | TokenKind::Ge) {
let opop = match op {
TokenKind::Le => TokenKind::Gt,
TokenKind::Ge => TokenKind::Lt,
_ => unreachable!(),
};
let op_fn = opop.float_cmp(op_ty).unwrap();
self.emit(op_fn(atr(nid), atr(lhs), atr(rhs)));
self.emit(instrs::not(atr(nid), atr(nid)));
} else if op_ty.is_integer() {
let op_fn =
if op_ty.is_signed() { instrs::cmps } else { instrs::cmpu };
self.emit(op_fn(atr(nid), atr(lhs), atr(rhs)));
self.emit(instrs::cmpui(atr(nid), atr(nid), against));
if matches!(op, TokenKind::Eq | TokenKind::Lt | TokenKind::Gt) {
self.emit(instrs::not(atr(nid), atr(nid)));
}
} else {
todo!("unhandled operator: {op}");
}
} else {
todo!("unhandled operator: {op}");
}
}
Kind::Call { args, func } => {
let (ret, mut parama) = tys.parama(node.ty);
let mut args = args.args();
let mut allocs = node.inputs[1..].iter();
while let Some(arg) = args.next(tys) {
let Arg::Value(ty) = arg else { continue };
let Some(loc) = parama.next(ty, tys) else { continue };
let mut arg = *allocs.next().unwrap();
let (rg, size) = match loc {
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
PLoc::WideReg(rg, size) => (rg, size),
PLoc::Ref(r, ..) => {
arg = match fuc.nodes[arg].kind {
Kind::Load { .. } => fuc.nodes[arg].inputs[1],
_ => arg,
};
self.emit(instrs::cp(r, atr(arg)));
continue;
}
PLoc::Reg(r, ..) => {
self.emit(instrs::cp(r, atr(arg)));
continue;
}
};
arg = match fuc.nodes[arg].kind {
Kind::Load { .. } => fuc.nodes[arg].inputs[1],
_ => arg,
};
self.emit(instrs::ld(rg, atr(arg), 0, size));
}
debug_assert!(
!matches!(ret, Some(PLoc::Ref(..))) || allocs.next().is_some()
);
if func == ty::ECA {
self.emit(instrs::eca());
} else {
self.relocs.push(TypedReloc {
target: ty::Kind::Func(func).compress(),
reloc: Reloc::new(self.code.len(), 3, 4),
});
self.emit(instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
}
match ret {
Some(PLoc::WideReg(r, size)) => {
debug_assert_eq!(
fuc.nodes[*node.inputs.last().unwrap()].kind,
Kind::Stck
);
let stck = self.offsets[*node.inputs.last().unwrap() as usize];
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
}
Some(PLoc::Reg(r, size)) if node.ty.loc(tys) == Loc::Stack => {
debug_assert_eq!(
fuc.nodes[*node.inputs.last().unwrap()].kind,
Kind::Stck
);
let stck = self.offsets[*node.inputs.last().unwrap() as usize];
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
}
Some(PLoc::Reg(r, ..)) => self.emit(instrs::cp(atr(nid), r)),
None | Some(PLoc::Ref(..)) => {}
}
}
Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4);
self.relocs.push(TypedReloc {
target: ty::Kind::Global(global).compress(),
reloc,
});
self.emit(instrs::lra(atr(nid), 0, 0));
}
Kind::Stck => {
let base = reg::STACK_PTR;
let offset = self.offsets[nid as usize];
self.emit(instrs::addi64(atr(nid), base, offset as _));
}
Kind::Load => {
let mut region = node.inputs[1];
let mut offset = 0;
if fuc.nodes[region].kind == (Kind::BinOp { op: TokenKind::Add })
&& let Kind::CInt { value } =
fuc.nodes[fuc.nodes[region].inputs[2]].kind
{
region = fuc.nodes[region].inputs[1];
offset = value as Offset;
}
let size = tys.size_of(node.ty);
if node.ty.loc(tys) != Loc::Stack {
let (base, offset) = match fuc.nodes[region].kind {
Kind::Stck => {
(reg::STACK_PTR, self.offsets[region as usize] + offset)
}
_ => (atr(region), offset),
};
self.emit(instrs::ld(atr(nid), base, offset as _, size as _));
}
}
Kind::Stre if node.inputs[1] == VOID => {}
Kind::Stre => {
let mut region = node.inputs[2];
let mut offset = 0;
let size = u16::try_from(tys.size_of(node.ty)).expect("TODO");
if fuc.nodes[region].kind == (Kind::BinOp { op: TokenKind::Add })
&& let Kind::CInt { value } =
fuc.nodes[fuc.nodes[region].inputs[2]].kind
&& node.ty.loc(tys) == Loc::Reg
{
region = fuc.nodes[region].inputs[1];
offset = value as Offset;
}
let nd = &fuc.nodes[region];
let value = node.inputs[1];
let (base, offset, src) = match nd.kind {
Kind::Stck if node.ty.loc(tys) == Loc::Reg => {
(reg::STACK_PTR, self.offsets[region as usize] + offset, value)
}
_ => (atr(region), offset, match fuc.nodes[value].kind {
Kind::Load { .. } => fuc.nodes[value].inputs[1],
_ => value,
}),
};
match node.ty.loc(tys) {
Loc::Reg => self.emit(instrs::st(atr(src), base, offset as _, size)),
Loc::Stack => {
debug_assert_eq!(offset, 0);
self.emit(instrs::bmc(atr(src), base, size))
}
}
}
Kind::Mem => self.emit(instrs::cp(atr(MEM), reg::RET)),
Kind::Arg => {}
e @ (Kind::Start
| Kind::Entry
| Kind::End
| Kind::Loops
| Kind::Then
| Kind::Else
| Kind::Phi
| Kind::Assert { .. }) => unreachable!("{e:?}"),
}
}
}
self.ralloc_my = res;
let bundle_count = self.ralloc_my.bundles.len() + (reg_offset as usize);
(
if fuc.tail {
bundle_count.saturating_sub(reg::RET_ADDR as _)
} else {
assert!(bundle_count < reg::STACK_PTR as usize, "TODO: spill memory");
self.ralloc_my.bundles.len()
},
fuc.tail,
)
}
}
pub struct Function<'a> {
sig: Sig,
tail: bool,
backrefs: Vec<u16>,
nodes: &'a mut Nodes,
tys: &'a Types,
visited: BitSet,
func: Func,
}
impl Function<'_> {
fn vreg_count(&self) -> usize {
self.nodes.values.len()
}
fn uses_of(&self, nid: Nid, buf: &mut Vec<Nid>) {
if self.nodes[nid].kind.is_cfg() && !matches!(self.nodes[nid].kind, Kind::Call { .. }) {
return;
}
self.nodes[nid]
.outputs
.iter()
.filter(|&&n| self.nodes.is_data_dep(nid, n))
.collect_into(buf);
}
fn phi_inputs_of(&self, nid: Nid, buf: &mut Vec<Nid>) {
match self.nodes[nid].kind {
Kind::Region => {
for &inp in self.nodes[nid].outputs.as_slice() {
if self.nodes[inp].is_data_phi() {
buf.extend(&self.nodes[inp].inputs[1..]);
buf.push(inp);
}
}
}
Kind::Loop => {
for &inp in self.nodes[nid].outputs.as_slice() {
if self.nodes[inp].is_data_phi() {
buf.push(self.nodes[inp].inputs[1]);
buf.push(inp);
buf.push(self.nodes[inp].inputs[2]);
}
}
}
_ => {}
}
}
fn instr_of(&self, nid: Nid) -> Option<Nid> {
if self.nodes[nid].kind == Kind::Phi || self.nodes[nid].lock_rc != 0 {
return None;
}
debug_assert_ne!(self.backrefs[nid as usize], Nid::MAX, "{:?}", self.nodes[nid]);
Some(self.backrefs[nid as usize])
}
fn block_of(&self, nid: Nid) -> Nid {
debug_assert!(self.nodes[nid].kind.starts_basic_block());
self.backrefs[nid as usize]
}
fn idom_of(&self, mut nid: Nid) -> Nid {
while !self.nodes[nid].kind.starts_basic_block() {
nid = self.nodes.idom(nid);
}
nid
}
fn use_block(&self, inst: Nid, uinst: Nid) -> Nid {
let mut block = self.nodes.use_block(inst, uinst);
while !self.nodes[block].kind.starts_basic_block() {
block = self.nodes.idom(block);
}
block
}
}
impl core::fmt::Debug for Function<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
for block in &self.func.blocks {
writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
for &instr in &self.func.instrs[block.range.clone()] {
writeln!(f, "{:?}", self.nodes[instr].kind)?;
}
}
Ok(())
}
}
impl<'a> Function<'a> {
fn new(nodes: &'a mut Nodes, tys: &'a Types, sig: Sig) -> Self {
let mut s = Self {
backrefs: vec![u16::MAX; nodes.values.len()],
tail: true,
nodes,
tys,
sig,
visited: Default::default(),
func: Default::default(),
};
s.visited.clear(s.nodes.values.len());
s.emit_node(VOID);
s
}
fn add_block(&mut self, entry: Nid) {
self.func
.blocks
.push(Block { range: self.func.instrs.len()..self.func.instrs.len(), entry });
self.backrefs[entry as usize] = self.func.blocks.len() as u16 - 1;
}
fn close_block(&mut self, exit: Nid) {
if !matches!(self.nodes[exit].kind, Kind::Loop | Kind::Region) {
self.add_instr(exit);
} else {
self.func.instrs.push(exit);
}
let prev = self.func.blocks.last_mut().unwrap();
prev.range.end = self.func.instrs.len();
}
fn add_instr(&mut self, nid: Nid) {
debug_assert_ne!(self.nodes[nid].kind, Kind::Loop);
self.backrefs[nid as usize] = self.func.instrs.len() as u16;
self.func.instrs.push(nid);
}
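// Emits the control-flow graph into `func.blocks`/`func.instrs`, depth first
// from the start node: Then/Else/Region/Loop open a new block, If/Return/Die
// and the region/loop edges close the current one, and every other scheduled
// node is appended as an instruction. A Loop is entered on its first visit and
// only closed again when its back edge is reached; a Region waits for its
// second predecessor before its block is emitted.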
fn emit_node(&mut self, nid: Nid) {
if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
match (self.nodes[nid].kind, self.visited.set(nid)) {
(Kind::Loop, false) | (Kind::Region, true) => {
self.close_block(nid);
return;
}
_ => {}
}
} else if !self.visited.set(nid) {
return;
}
if self.nodes.is_never_used(nid, self.tys) {
self.nodes.lock(nid);
return;
}
let mut node = self.nodes[nid].clone();
match node.kind {
Kind::Start => {
debug_assert_matches!(self.nodes[node.outputs[0]].kind, Kind::Entry);
self.add_block(VOID);
self.emit_node(node.outputs[0])
}
Kind::If => {
let &[_, cond] = node.inputs.as_slice() else { unreachable!() };
let &[mut then, mut else_] = node.outputs.as_slice() else { unreachable!() };
if let Kind::BinOp { op } = self.nodes[cond].kind
&& let Some((_, swapped)) = op.cond_op(node.ty)
&& swapped
{
mem::swap(&mut then, &mut else_);
}
self.close_block(nid);
self.emit_node(then);
self.emit_node(else_);
}
Kind::Region | Kind::Loop => {
self.close_block(nid);
self.add_block(nid);
self.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
self.emit_node(o);
}
}
Kind::Return | Kind::Die => {
self.close_block(nid);
self.emit_node(node.outputs[0]);
}
Kind::Entry => {
let (ret, mut parama) = self.tys.parama(self.sig.ret);
let mut typs = self.sig.args.args();
#[expect(clippy::unnecessary_to_owned)]
let mut args = self.nodes[VOID].outputs[ARG_START..].to_owned().into_iter();
while let Some(ty) = typs.next_value(self.tys) {
let arg = args.next().unwrap();
debug_assert_eq!(self.nodes[arg].kind, Kind::Arg);
match parama.next(ty, self.tys) {
None => {}
Some(_) => self.add_instr(arg),
}
}
if let Some(PLoc::Ref(..)) = ret {
self.add_instr(MEM);
}
self.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
self.emit_node(o);
}
}
Kind::Then | Kind::Else => {
self.add_block(nid);
self.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
self.emit_node(o);
}
}
Kind::Call { func, .. } => {
self.tail &= func == ty::ECA;
self.add_instr(nid);
self.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o);
}
}
}
Kind::CInt { .. }
| Kind::BinOp { .. }
| Kind::UnOp { .. }
| Kind::Global { .. }
| Kind::Load { .. }
| Kind::Stre
| Kind::Stck => self.add_instr(nid),
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops => {}
Kind::Assert { .. } => unreachable!(),
}
}
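// Reorders a block entry's outputs so that, after the caller's reverse
// iteration, every value is emitted before its in-block users and the
// control-flow successors come last: a node is appended only once all of its
// users pinned to this block are already in the buffer.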
fn reschedule_block(&mut self, from: Nid, outputs: &mut Vc) {
let from = Some(&from);
let mut buf = Vec::with_capacity(outputs.len());
let mut seen = BitSet::default();
seen.clear(self.nodes.values.len());
for &o in outputs.iter() {
if !self.nodes.is_cfg(o) {
continue;
}
seen.set(o);
let mut cursor = buf.len();
buf.push(o);
while let Some(&n) = buf.get(cursor) {
for &i in &self.nodes[n].inputs[1..] {
if from == self.nodes[i].inputs.first()
&& self.nodes[i]
.outputs
.iter()
.all(|&o| self.nodes[o].inputs.first() != from || seen.get(o))
&& seen.set(i)
{
buf.push(i);
}
}
cursor += 1;
}
}
for &o in outputs.iter() {
if !seen.set(o) {
continue;
}
let mut cursor = buf.len();
buf.push(o);
while let Some(&n) = buf.get(cursor) {
for &i in &self.nodes[n].inputs[1..] {
if from == self.nodes[i].inputs.first()
&& self.nodes[i]
.outputs
.iter()
.all(|&o| self.nodes[o].inputs.first() != from || seen.get(o))
&& seen.set(i)
{
buf.push(i);
}
}
cursor += 1;
}
}
debug_assert!(
outputs.len() == buf.len() || outputs.len() == buf.len() + 1,
"{:?} {:?}",
outputs,
buf
);
if buf.len() + 1 == outputs.len() {
outputs.remove(outputs.len() - 1);
}
outputs.copy_from_slice(&buf);
}
}
pub struct Env<'a> {
ctx: &'a Function<'a>,
func: &'a Func,
res: &'a mut Res,
}
impl<'a> Env<'a> {
pub fn new(ctx: &'a Function<'a>, func: &'a Func, res: &'a mut Res) -> Self {
Self { ctx, func, res }
}
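// Assigns every scheduled value to a bundle: phi inputs are visited first,
// block by block, so phi webs are allocated before unrelated values, and the
// remaining instructions follow in schedule order.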
pub fn run(&mut self) {
self.res.bundles.clear();
self.res.node_to_reg.clear();
self.res.node_to_reg.resize(self.ctx.vreg_count(), 0);
debug_assert!(self.res.dfs_buf.is_empty());
debug_assert!(self.res.use_buf.is_empty());
debug_assert!(self.res.phi_input_buf.is_empty());
let mut bundle = Bundle::new(self.func.instrs.len());
let mut visited = BitSet::with_capacity(self.ctx.nodes.values.len());
let mut use_buf = mem::take(&mut self.res.use_buf);
let mut phi_input_buf = mem::take(&mut self.res.phi_input_buf);
for block in &self.func.blocks {
self.ctx.phi_inputs_of(block.entry, &mut phi_input_buf);
for param in phi_input_buf.drain(..) {
if !visited.set(param) {
continue;
}
self.append_bundle(param, &mut bundle, &mut use_buf);
}
}
self.res.phi_input_buf = phi_input_buf;
for &inst in &self.func.instrs {
if visited.get(inst) || inst == 0 {
continue;
}
self.append_bundle(inst, &mut bundle, &mut use_buf);
}
self.res.use_buf = use_buf;
}
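// Builds the live range of `inst` by walking blocks backwards from each use
// towards the defining block's dominator, marking the covered slice of the
// instruction stream, then reuses the first existing bundle that does not
// overlap it (or starts a new one). The chosen bundle index plus one becomes
// the node's virtual register, so zero keeps meaning "unallocated".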
fn append_bundle(&mut self, inst: Nid, bundle: &mut Bundle, use_buf: &mut Vec<Nid>) {
let mut dom = self.ctx.idom_of(inst);
if self.ctx.nodes[dom].kind == Kind::Loop && self.ctx.nodes[inst].kind == Kind::Phi {
dom = self.ctx.nodes.idom(dom);
dom = self.ctx.idom_of(dom);
}
std::println!("{inst} {:?}", self.ctx.nodes[inst].key());
self.ctx.uses_of(inst, use_buf);
for uinst in use_buf.drain(..) {
std::println!("| {uinst} {:?}", self.ctx.nodes[uinst].key());
let cursor = self.ctx.use_block(inst, uinst);
self.reverse_cfg_dfs(cursor, dom, |_, n, b| {
let mut range = b.range.clone();
range.start =
range.start.max(self.ctx.instr_of(inst).map_or(0, |n| n + 1) as usize);
range.end = range.end.min(
self.ctx
.instr_of(uinst)
.filter(|_| self.ctx.nodes.loop_depth(dom) == self.ctx.nodes.loop_depth(n))
.map_or(Nid::MAX, |n| n + 1) as usize,
);
std::println!("|- {range:?} {:?}", self.ctx.nodes[n].key());
bundle.add(range);
});
}
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(bundle)) {
Some((i, other)) => {
other.merge(bundle);
bundle.clear();
self.res.node_to_reg[inst as usize] = i as Reg + 1;
}
None => {
self.res.bundles.push(mem::replace(bundle, Bundle::new(self.func.instrs.len())));
self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
}
}
std::println!("|= {}", self.res.node_to_reg[inst as usize]);
}
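// Walks block entries from `from` back towards `until`, following each
// predecessor's dominating block entry, and invokes `each` with the block of
// every node visited (including `until` itself, where the walk stops).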
fn reverse_cfg_dfs(
&mut self,
from: Nid,
until: Nid,
mut each: impl FnMut(&mut Self, Nid, &Block),
) {
debug_assert!(self.res.dfs_buf.is_empty());
self.res.dfs_buf.push(from);
self.res.dfs_seem.clear(self.ctx.nodes.values.len());
while let Some(nid) = self.res.dfs_buf.pop() {
each(self, nid, &self.func.blocks[self.ctx.block_of(nid) as usize]);
if nid == until {
continue;
}
match self.ctx.nodes[nid].kind {
Kind::Then | Kind::Else | Kind::Region | Kind::Loop => {
for &n in self.ctx.nodes[nid].inputs.iter() {
let d = self.ctx.idom_of(n);
if self.res.dfs_seem.set(d) {
self.res.dfs_buf.push(d);
}
}
}
Kind::Start => {}
_ => unreachable!(),
}
}
}
}
#[derive(Default)]
pub struct Res {
pub bundles: Vec<Bundle>,
pub node_to_reg: Vec<Reg>,
use_buf: Vec<Nid>,
phi_input_buf: Vec<Nid>,
dfs_buf: Vec<Nid>,
dfs_seem: BitSet,
}
pub struct Bundle {
taken: Vec<bool>,
}
impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) {
self.taken[range].fill(true);
}
fn overlaps(&self, other: &Self) -> bool {
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
}
fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(other));
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
}
fn clear(&mut self) {
self.taken.fill(false);
}
}
#[derive(Default)]
pub struct Func {
pub blocks: Vec<Block>,
pub instrs: Vec<Nid>,
}
pub struct Block {
pub range: Range<usize>,
pub entry: Nid,
}
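
The heart of append_bundle above is the live-range construction: for every use of a value, blocks are walked from the use back towards the defining block, and inside each visited block the covered slice of the linearised instruction stream is marked. Below is a stripped-down, self-contained version of that marking step over a small dominator chain; the types and the clamping rules are simplified relative to the real code.

use std::ops::Range;

struct Block {
    range: Range<usize>, // slice of the linearised instruction stream
    idom: usize,         // immediately dominating block; the entry points at itself
}

// Mark the instruction slots on which a value is live for a single use,
// walking blocks from the use's block back to the defining block.
fn mark_live(blocks: &[Block], def: (usize, usize), use_: (usize, usize), taken: &mut [bool]) {
    let (def_block, def_at) = def;
    let (mut cursor, use_at) = use_;
    loop {
        let block = &blocks[cursor];
        let start = block.range.start.max(def_at + 1);
        let end =
            if cursor == use_.0 { block.range.end.min(use_at + 1) } else { block.range.end };
        if start < end {
            taken[start..end].fill(true);
        }
        if cursor == def_block {
            break;
        }
        cursor = block.idom;
    }
}

fn main() {
    // Three blocks laid out back to back over nine instruction slots.
    let blocks = [
        Block { range: 0..3, idom: 0 },
        Block { range: 3..6, idom: 0 },
        Block { range: 6..9, idom: 1 },
    ];
    let mut taken = [false; 9];
    // Value defined at slot 1 (block 0), used at slot 7 (block 2):
    // it ends up live on slots 2 through 7, across all three blocks.
    mark_live(&blocks, (0, 1), (2, 7), &mut taken);
    assert_eq!(taken, [false, false, true, true, true, true, true, true, false]);
}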

File diff suppressed because it is too large

View file

@@ -62,6 +62,12 @@ impl BitSet {
const INLINE_ELEMS: usize = Self::UNIT - 1;
const UNIT: usize = core::mem::size_of::<usize>() * 8;
+pub fn with_capacity(len: usize) -> Self {
+let mut s = Self::default();
+s.reserve(len);
+s
+}
fn is_inline(&self) -> bool {
unsafe { self.inline & Self::FLAG != 0 }
}
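
The added with_capacity is just default() followed by reserve, and the new backend uses it to size its visited sets up front (BitSet::with_capacity(self.ctx.nodes.values.len())). A minimal growable bit set with the same surface, omitting the real type's inline small-set representation, might look like the sketch below; it is illustrative, not the crate's implementation.

#[derive(Default)]
struct BitSet {
    units: Vec<usize>,
}

impl BitSet {
    const UNIT: usize = usize::BITS as usize;

    fn with_capacity(len: usize) -> Self {
        let mut s = Self::default();
        s.reserve(len);
        s
    }

    /// Make sure at least `len` bits are addressable.
    fn reserve(&mut self, len: usize) {
        let units = len.div_ceil(Self::UNIT);
        if units > self.units.len() {
            self.units.resize(units, 0);
        }
    }

    /// Insert `idx`; returns true if it was not present before.
    fn set(&mut self, idx: usize) -> bool {
        self.reserve(idx + 1);
        let (unit, bit) = (idx / Self::UNIT, idx % Self::UNIT);
        let was_set = self.units[unit] & (1usize << bit) != 0;
        self.units[unit] |= 1usize << bit;
        !was_set
    }

    fn get(&self, idx: usize) -> bool {
        self.units
            .get(idx / Self::UNIT)
            .is_some_and(|u| u & (1usize << (idx % Self::UNIT)) != 0)
    }
}

fn main() {
    let mut visited = BitSet::with_capacity(128);
    assert!(visited.set(3));  // newly inserted
    assert!(!visited.set(3)); // already present
    assert!(visited.get(3) && !visited.get(4));
}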