holey-bytes/lang/src/backend/hbvm/regalloc.rs
Jakub Doka 6e8eb059f6
adding tuples
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-19 23:08:36 +01:00

1149 lines
43 KiB
Rust

use {
super::ParamAlloc,
crate::{
backend::hbvm::{
reg::{self, Reg},
HbvmBackend, Nid, Nodes, PLoc, Reloc, TypedReloc,
},
lexer::TokenKind,
nodes::{Kind, ARG_START, MEM, VOID},
parser, quad_sort,
ty::{self, Arg, Loc, Module, Offset, Sig, Types},
utils::{BitSet, EntSlice},
},
alloc::{borrow::ToOwned, vec::Vec},
core::{assert_matches::debug_assert_matches, mem, ops::Range},
hbbytecode::{self as instrs},
};
impl HbvmBackend {
/// Emits hbvm machine code for an already-scheduled function body.
///
/// Pipeline: build the block schedule (`FunctionBuilder::build`), run the
/// bundle-based register allocator, remap 1-based bundle ids onto physical
/// registers (shifted past the reserved/special registers), then walk every
/// block lowering each node to instructions.
///
/// Returns `(saved_reg_count, tail)`: how many registers the prologue and
/// epilogue must preserve, and whether this is a tail function (makes no
/// real calls, so `RET_ADDR` can be reused as a general-purpose register).
pub(super) fn emit_body_code(
    &mut self,
    nodes: &Nodes,
    sig: Sig,
    tys: &Types,
    files: &EntSlice<Module, parser::Ast>,
) -> (usize, bool) {
    let tail = FunctionBuilder::build(nodes, tys, &mut self.ralloc, sig);

    // Loads of stack-located values are no-ops at runtime: any use of such a
    // load aliases the allocation of the load's address operand instead.
    let strip_load = |value| match nodes[value].kind {
        Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
        _ => value,
    };

    let mut res = mem::take(&mut self.ralloc);

    // Registers below this are reserved (zero, ret, arg regs, ...) and are
    // never handed out by the allocator.
    let special_reg_count = 13u8;
    Regalloc::run(nodes, tys, &mut res, special_reg_count as _);

    '_open_function: {
        // Placeholder prologue; immediates are patched once the final stack
        // size and spill count are known.
        self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
        self.emit(instrs::st(reg::RET_ADDR + tail as u8, reg::STACK_PTR, 0, 0));
    }

    // When returning by reference, MEM carries the out-pointer, so it needs
    // a bundle (and therefore a register) of its own.
    if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 {
        res.node_to_reg[MEM as usize] = res.general_bundles.len() as u8 + 1;
        res.general_bundles.push(Bundle::default());
    }

    // Bundle ids are 1-based; shift them past the reserved registers (and
    // keep RET_ADDR untouched when the function is not a tail function).
    let reg_offset = if tail { special_reg_count } else { reg::RET_ADDR + 1 };
    let bundle_count = res.general_bundles.len() + (reg_offset as usize);

    res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
        if *r == u8::MAX {
            // u8::MAX marks "value has no uses"; park it in the zero reg
            *r = 0
        } else {
            *r += reg_offset - 1;
            if tail && *r >= reg::RET_ADDR {
                // skip over RET_ADDR, it still holds the return address
                *r += 1;
            }
        }
    });

    debug_assert!(!res
        .node_to_reg
        .iter()
        .any(|&a| a == reg::RET_ADDR || (reg::RET..reg_offset - 1).contains(&a)));

    // Physical register assigned to a node (after load stripping).
    let atr = |allc: Nid| {
        let allc = strip_load(allc);
        debug_assert!(
            nodes.is_unlocked(allc),
            "{:?} {}",
            nodes[allc],
            ty::Display::new(tys, files, nodes[allc].ty)
        );
        res.node_to_reg[allc as usize]
    };

    // Move incoming arguments from their ABI locations into their allocated
    // registers, spilling wide/stack-located arguments into the frame.
    let (retl, mut parama) = tys.parama(sig.ret);
    let mut typs = sig.args.args();
    let mut args = nodes[VOID].outputs[ARG_START..].iter();
    while let Some(aty) = typs.next(tys) {
        let Arg::Value(ty) = aty else { continue };
        let Some(loc) = parama.next(ty, tys) else { continue };
        let &arg = args.next().unwrap();
        let (rg, size) = match loc {
            PLoc::WideReg(rg, size) => (rg, size),
            PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
            PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
                // plain register argument: a single copy suffices
                self.emit_cp(atr(arg), r);
                continue;
            }
        };
        self.emit(instrs::st(rg, reg::STACK_PTR, self.offsets[arg as usize] as _, size));
        if nodes.is_unlocked(arg) {
            self.emit(instrs::addi64(rg, reg::STACK_PTR, self.offsets[arg as usize] as _));
        }
        self.emit_cp(atr(arg), rg);
    }

    let mut alloc_buf = vec![];
    for (i, block) in res.blocks.iter().enumerate() {
        // record the block entry's code offset for jump relocation
        self.offsets[block.entry as usize] = self.code.len() as _;
        for &nid in &res.instrs[block.range()] {
            if nid == VOID {
                continue;
            }

            let node = &nodes[nid];
            alloc_buf.clear();

            // Debug-only sanity check: the allocator must have recorded
            // this (def, use) pair, modulo a few known exceptions.
            let assert_alloc_use = |allc: Nid| {
                debug_assert!(
                    nodes.is_unlocked(allc),
                    "{:?} {}",
                    nodes[allc],
                    ty::Display::new(tys, files, nodes[allc].ty)
                );
                #[cfg(debug_assertions)]
                debug_assert!(
                    res.marked.contains(&(allc, nid))
                        || nid == allc
                        || nodes.is_hard_zero(allc)
                        || allc == MEM
                        || matches!(node.kind, Kind::Loop | Kind::Region),
                    "{nid} {:?}\n{allc} {:?} {}",
                    nodes[nid],
                    nodes[allc],
                    ty::Display::new(tys, files, nodes[allc].ty)
                );
            };

            let atr = |allc: Nid| {
                let allc = strip_load(allc);
                assert_alloc_use(allc);
                res.node_to_reg[allc as usize]
            };

            // Like `atr` but resolves locked (materialized-in-place) nodes
            // to a (base register, byte offset) addressing pair.
            let offset_atr = |pallc: Nid, offsets: &[Offset]| {
                let allc = strip_load(pallc);
                if nodes.is_locked(allc) {
                    let (region, offset) = nodes.strip_offset(allc);
                    match nodes[region].kind {
                        Kind::Stck => {
                            return (
                                reg::STACK_PTR,
                                offsets[region as usize] as u64 + offset as u64,
                            )
                        }
                        _ => {
                            assert_alloc_use(region);
                            return (res.node_to_reg[region as usize], offset as u64);
                        }
                    }
                }
                assert_alloc_use(allc);
                (res.node_to_reg[allc as usize], 0)
            };

            match node.kind {
                // MEM holds the out-pointer delivered in RET
                Kind::Mem => self.emit(instrs::cp(atr(MEM), reg::RET)),
                Kind::Arg => {}
                Kind::If => {
                    let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
                    if let Some((op, swapped)) = nodes.cond_op(cnd) {
                        // fused compare-and-branch
                        let &[_, lh, rh] = nodes[cnd].inputs.as_slice() else { unreachable!() };
                        let [lhs, rhs] = [atr(lh), atr(rh)];
                        self.extend(nodes[lh].ty, nodes[lh].ty.extend(), lhs, tys, files);
                        self.extend(nodes[rh].ty, nodes[rh].ty.extend(), rhs, tys, files);
                        let rel = Reloc::new(self.code.len(), 3, 2);
                        self.jump_relocs.push((node.outputs[!swapped as usize], rel));
                        self.emit(op(lhs, rhs, 0));
                    } else {
                        // generic condition: branch when it is non-zero
                        let cd = atr(cnd);
                        debug_assert_eq!(nodes[node.outputs[0]].kind, Kind::Then);
                        self.extend(nodes[cnd].ty, nodes[cnd].ty.extend(), cd, tys, files);
                        let rel = Reloc::new(self.code.len(), 3, 2);
                        self.jump_relocs.push((node.outputs[0], rel));
                        self.emit(instrs::jne(cd, reg::ZERO, 0));
                    }
                }
                Kind::Loop | Kind::Region => {
                    // which predecessor edge are we entering through?
                    let index = node
                        .inputs
                        .iter()
                        .position(|&n| block.entry == nodes.idom_of(n))
                        .unwrap()
                        + 1;

                    // [dest, src, depth] parallel-moves for the data phis
                    let mut moves = vec![];
                    for &out in node.outputs.iter() {
                        if nodes[out].is_data_phi() {
                            let src = nodes[out].inputs[index];
                            if atr(out) != atr(src) {
                                moves.push([atr(out), atr(src), 0]);
                            }
                        }
                    }

                    // code makes sure all moves are ordered so that register is only moved
                    // into after all its uses
                    //
                    // in case of cycles, swaps are used instead in which case the conflicting
                    // move is removed and remaining moves are replaced with swaps
                    const CYCLE_SENTINEL: u8 = u8::MAX;

                    // destinations must be unique for the graph to be valid
                    debug_assert_eq!(
                        {
                            let mut dests = moves.iter().map(|&[d, ..]| d).collect::<Vec<_>>();
                            dests.sort_unstable();
                            dests.dedup();
                            dests.len()
                        },
                        moves.len()
                    );

                    // graph[d] = s: register d is assigned from register s
                    let mut graph = [u8::MAX; 256];
                    for &[d, s, _] in moves.iter() {
                        graph[d as usize] = s;
                    }

                    'o: for &mut [d, s, ref mut depth] in moves.iter_mut() {
                        // follow the chain from the source; reaching d again
                        // means this move closes a cycle
                        let mut c = s;
                        loop {
                            if c == d {
                                break;
                            }
                            c = graph[c as usize];
                            *depth += 1;
                            if c == u8::MAX {
                                continue 'o;
                            }
                        }
                        // cut the cycle
                        graph[c as usize] = u8::MAX;
                        // mark cycle
                        *depth = CYCLE_SENTINEL;
                    }

                    // deepest chains first so a register is read before it
                    // is overwritten; cycles (sentinel depth) come first
                    quad_sort(&mut moves, |a, b| a[2].cmp(&b[2]).reverse());

                    for [mut d, mut s, depth] in moves {
                        if depth == CYCLE_SENTINEL {
                            // resolve the whole cycle with swaps
                            while graph[s as usize] != u8::MAX {
                                self.emit(instrs::swa(d, s));
                                d = s;
                                mem::swap(&mut graph[s as usize], &mut s);
                            }
                            // trivial cycle denotes this move was already generated in a
                            // cycle
                            graph[s as usize] = s;
                        } else if graph[s as usize] != s {
                            self.emit(instrs::cp(d, s));
                        }
                    }

                    // jump unless the target block is the fallthrough
                    if res.backrefs[nid as usize] as usize != i + 1 {
                        let rel = Reloc::new(self.code.len(), 1, 4);
                        self.jump_relocs.push((nid, rel));
                        self.emit(instrs::jmp(0));
                    }
                }
                Kind::Return { .. } => {
                    let &[_, ret, ..] = node.inputs.as_slice() else { unreachable!() };
                    match retl {
                        None => {}
                        // stack-located small return: load it into the ret reg
                        Some(PLoc::Reg(r, size)) if sig.ret.loc(tys) == Loc::Stack => {
                            let (src, offset) = offset_atr(ret, &self.offsets);
                            self.emit(instrs::ld(r, src, offset, size))
                        }
                        Some(PLoc::WideReg(r, size)) => {
                            let (src, offset) = offset_atr(ret, &self.offsets);
                            self.emit(instrs::ld(r, src, offset, size))
                        }
                        Some(PLoc::Reg(r, _)) => {
                            alloc_buf.push(atr(ret));
                            self.emit(instrs::cp(r, atr(ret)));
                        }
                        Some(PLoc::Ref(_, size)) => {
                            // copy the value through the out-pointer; bmc is
                            // limited to u16 lengths, so chunk large copies
                            let [src, dst] = [atr(ret), atr(MEM)];
                            if let Ok(size) = u16::try_from(size) {
                                self.emit(instrs::bmc(src, dst, size));
                            } else {
                                for _ in 0..size / u16::MAX as u32 {
                                    self.emit(instrs::bmc(src, dst, u16::MAX));
                                    self.emit(instrs::addi64(src, src, u16::MAX as _));
                                    self.emit(instrs::addi64(dst, dst, u16::MAX as _));
                                }
                                self.emit(instrs::bmc(src, dst, size as u16));
                                // rewind the pointers we advanced above
                                self.emit(instrs::addi64(src, src, size.wrapping_neg() as _));
                                self.emit(instrs::addi64(dst, dst, size.wrapping_neg() as _));
                            }
                        }
                    }
                    // jump to the epilogue unless this is the last block
                    if i != res.blocks.len() - 1 {
                        let rel = Reloc::new(self.code.len(), 1, 4);
                        self.ret_relocs.push(rel);
                        self.emit(instrs::jmp(0));
                    }
                }
                Kind::Die => {
                    self.emit(instrs::un());
                }
                Kind::CInt { value: 0 } => self.emit(instrs::cp(atr(nid), reg::ZERO)),
                Kind::CInt { value } if node.ty == ty::Id::F32 => {
                    // f32 constants are stored as f64 bits; narrow here
                    self.emit(instrs::li32(
                        atr(nid),
                        (f64::from_bits(value as _) as f32).to_bits(),
                    ));
                }
                Kind::CInt { value } => self.emit(match tys.size_of(node.ty) {
                    1 => instrs::li8(atr(nid), value as _),
                    2 => instrs::li16(atr(nid), value as _),
                    4 => instrs::li32(atr(nid), value as _),
                    _ => instrs::li64(atr(nid), value as _),
                }),
                Kind::UnOp { op } => {
                    let op = op
                        .unop(
                            node.ty,
                            tys.inner_of(nodes[node.inputs[1]].ty)
                                .unwrap_or(nodes[node.inputs[1]].ty),
                            tys,
                        )
                        .unwrap_or_else(|| {
                            panic!(
                                "TODO: unary operator not supported: {op} {} {}",
                                ty::Display::new(tys, files, node.ty),
                                ty::Display::new(
                                    tys,
                                    files,
                                    tys.inner_of(nodes[node.inputs[1]].ty)
                                        .unwrap_or(nodes[node.inputs[1]].ty)
                                )
                            )
                        });
                    self.emit(op(atr(nid), atr(node.inputs[1])));
                }
                Kind::BinOp { op } => {
                    let &[.., lhs, rhs] = node.inputs.as_slice() else { unreachable!() };

                    if let Kind::CInt { value } = nodes[rhs].kind
                        && nodes.is_locked(rhs)
                        && let Some(op) = op.imm_binop(node.ty)
                    {
                        // immediate form: rhs constant folded into the instr
                        self.emit(op(atr(nid), atr(lhs), value as _));
                    } else if node.kind == (Kind::BinOp { op: TokenKind::Add })
                        && node.ty.is_float()
                        && nodes.is_locked(lhs)
                    {
                        // locked float mul feeding an add: fuse into fma
                        let fma = [instrs::fma32, instrs::fma64]
                            [node.ty.simple_size().unwrap().ilog2() as usize - 2];
                        self.emit(fma(
                            atr(nid),
                            atr(nodes[lhs].inputs[1]),
                            atr(nodes[lhs].inputs[2]),
                            atr(rhs),
                        ));
                    } else if let Some(against) = op.cmp_against() {
                        let op_ty = nodes[rhs].ty;
                        let [dst, lhs, rhs] = [atr(nid), atr(lhs), atr(rhs)];
                        if let Some(op) = op.float_cmp(op_ty) {
                            self.emit(op(dst, lhs, rhs));
                        } else if op_ty.is_float()
                            && matches!(op, TokenKind::Le | TokenKind::Ge)
                        {
                            // no float <=/>=; emit the strict opposite and
                            // negate the result
                            let op = match op {
                                TokenKind::Le => TokenKind::Gt,
                                TokenKind::Ge => TokenKind::Lt,
                                _ => unreachable!(),
                            };
                            let op_fn = op.float_cmp(op_ty).unwrap();
                            self.emit(op_fn(dst, lhs, rhs));
                            self.emit(instrs::not(dst, dst));
                        } else {
                            // integer compare to {-1,0,1}, then test against
                            // the expected ordering value
                            let op_fn =
                                if op_ty.is_signed() { instrs::cmps } else { instrs::cmpu };
                            self.emit(op_fn(dst, lhs, rhs));
                            self.emit(instrs::cmpui(dst, dst, against));
                            if matches!(op, TokenKind::Eq | TokenKind::Lt | TokenKind::Gt) {
                                self.emit(instrs::not(dst, dst));
                            }
                        }
                    } else if let Some(op) = op.binop(node.ty) {
                        let [dst, lhs, rhs] = [atr(nid), atr(lhs), atr(rhs)];
                        self.emit(op(dst, lhs, rhs));
                    } else {
                        todo!("unhandled operator: {op}");
                    }
                }
                Kind::Call { args, func, .. } => {
                    let (ret, mut parama) = tys.parama(node.ty);
                    debug_assert!(node.ty != ty::Id::NEVER || ret.is_none());
                    if let Some(PLoc::Ref(r, ..)) = ret {
                        // pass the out-pointer (last input) in the ref reg
                        self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap())))
                    }

                    // marshal arguments into their ABI locations
                    let mut args = args.args();
                    let mut allocs = node.inputs[1..].iter();
                    while let Some(arg) = args.next(tys) {
                        let Arg::Value(ty) = arg else { continue };
                        let Some(loc) = parama.next(ty, tys) else { continue };
                        let &arg = allocs.next().unwrap();

                        let (rg, size) = match loc {
                            PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
                            PLoc::WideReg(rg, size) => (rg, size),
                            PLoc::Ref(r, ..) | PLoc::Reg(r, ..) => {
                                self.emit(instrs::cp(r, atr(arg)));
                                continue;
                            }
                        };

                        let (src, off) = offset_atr(arg, &self.offsets);
                        self.emit(instrs::ld(rg, src, off, size));
                    }

                    if func == ty::Func::ECA {
                        // environment call, no relocation needed
                        self.emit(instrs::eca());
                    } else {
                        self.relocs.push(TypedReloc {
                            target: func.into(),
                            reloc: Reloc::new(self.code.len(), 3, 4),
                        });
                        self.emit(instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
                    }

                    // stack-located return delivered in registers: spill it
                    // through the out-pointer
                    if node.ty.loc(tys) == Loc::Stack
                        && let Some(PLoc::Reg(r, size) | PLoc::WideReg(r, size)) = ret
                    {
                        self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size));
                    }

                    //match ret {
                    //    Some(PLoc::WideReg(..)) => {}
                    //    Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
                    //    Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
                    //    None | Some(PLoc::Ref(..)) => {}
                    //}
                }
                Kind::RetVal => {
                    // capture the callee's return value from RET
                    self.emit_cp(atr(nid), reg::RET);
                }
                Kind::Global { global } => {
                    let reloc = Reloc::new(self.code.len(), 3, 4);
                    self.relocs.push(TypedReloc { target: global.into(), reloc });
                    self.emit(instrs::lra(atr(nid), 0, 0));
                }
                Kind::Stck => {
                    // materialize the address of the stack slot
                    let base = reg::STACK_PTR;
                    let offset = self.offsets[nid as usize];
                    self.emit(instrs::addi64(atr(nid), base, offset as _));
                }
                Kind::Load => {
                    let (region, offset) = nodes.strip_offset(node.inputs[1]);
                    let size = tys.size_of(node.ty);
                    // stack-located loads are free (aliased, see strip_load)
                    if node.ty.loc(tys) != Loc::Stack {
                        let (base, offset) = match nodes[region].kind {
                            Kind::Stck => {
                                (reg::STACK_PTR, self.offsets[region as usize] + offset)
                            }
                            _ => (atr(region), offset),
                        };
                        self.emit(instrs::ld(atr(nid), base, offset as _, size as _));
                    }
                }
                Kind::Stre => {
                    debug_assert_ne!(node.inputs[1], VOID);
                    let (region, offset) = nodes.strip_offset(node.inputs[2]);
                    let size = u16::try_from(tys.size_of(node.ty)).expect("TODO");
                    let (base, offset, src) = match nodes[region].kind {
                        Kind::Stck if node.ty.loc(tys) == Loc::Reg => (
                            reg::STACK_PTR,
                            self.offsets[region as usize] + offset,
                            atr(node.inputs[1]),
                        ),
                        _ => (atr(region), offset, atr(node.inputs[1])),
                    };

                    match node.ty.loc(tys) {
                        Loc::Reg => self.emit(instrs::st(src, base, offset as _, size)),
                        // stack-to-stack store is a block memory copy
                        Loc::Stack => {
                            debug_assert_eq!(offset, 0);
                            self.emit(instrs::bmc(src, base, size))
                        }
                    }
                }
                // these never appear in the scheduled instruction stream
                e @ (Kind::Start
                | Kind::Assert { .. }
                | Kind::Entry
                | Kind::End
                | Kind::Loops
                | Kind::Then
                | Kind::Else
                | Kind::Phi
                | Kind::Join) => unreachable!("{e:?}"),
            }
        }
    }

    self.ralloc = res;

    debug_assert!(bundle_count < reg::STACK_PTR as usize, "TODO: spill memory");
    debug_assert_eq!(
        self.ralloc
            .node_to_reg
            .iter()
            .filter(|&&r| r
                > (bundle_count as u8
                    + (tail && bundle_count > (reg::RET_ADDR) as usize) as u8))
            .copied()
            .collect::<Vec<_>>(),
        vec![],
        "{bundle_count}"
    );

    (
        if tail {
            bundle_count.saturating_sub(reg::RET_ADDR as _)
        } else {
            self.ralloc.general_bundles.len()
        },
        tail,
    )
}
/// Copies `src` into `dst`, eliding the move when the destination is the
/// zero register (the value was allocated as "never read").
fn emit_cp(&mut self, dst: Reg, src: Reg) {
    // register 0 is the discard destination; emitting a copy is pointless
    if dst == 0 {
        return;
    }
    self.emit(instrs::cp(dst, src));
}
}
/// Walks the sea-of-nodes graph and produces a linear block schedule in
/// `func` (filling `Res::blocks`, `Res::instrs` and `Res::backrefs`).
struct FunctionBuilder<'a> {
    sig: Sig,          // signature of the function being scheduled
    tail: bool,        // remains true while no non-ECA call is encountered
    nodes: &'a Nodes,  // the node graph being scheduled
    tys: &'a Types,
    func: &'a mut Res, // output: schedule and per-node back references
}
impl core::fmt::Debug for FunctionBuilder<'_> {
    /// Dumps the schedule: each block's entry node kind followed by the
    /// kinds of the instructions scheduled within it.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.func.blocks.iter().try_for_each(|blk| {
            writeln!(f, "{:?}", self.nodes[blk.entry].kind)?;
            self.func.instrs[blk.range()]
                .iter()
                .try_for_each(|&ins| writeln!(f, "{:?}", self.nodes[ins].kind))
        })
    }
}
impl<'a> FunctionBuilder<'a> {
/// Schedules `nodes` into `func` and reports whether the function is a
/// tail function (performs no real calls, so the return address register
/// may be repurposed by the caller).
fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
    func.blocks.clear();
    func.instrs.clear();
    // backrefs map node -> instruction/block index; u16::MAX marks nodes
    // that were never scheduled
    func.backrefs.resize(nodes.len(), u16::MAX);
    func.visited.clear(nodes.len());
    let mut s = Self { tail: true, nodes, tys, sig, func };
    s.emit_node(VOID);
    // blocks must tile the instruction stream without gaps or overlaps
    debug_assert!(s.func.blocks.array_chunks().all(|[a, b]| a.end == b.start));
    log::info!("{s:?}");
    s.tail
}
/// Opens a new (initially empty) block starting at the current end of the
/// instruction stream and records the entry node's block index.
fn add_block(&mut self, entry: Nid) {
    let cursor = self.func.instrs.len() as _;
    self.func.blocks.push(Block { start: cursor, end: cursor, entry });
    self.func.backrefs[entry as usize] = self.func.blocks.len() as u16 - 1;
}
/// Appends the terminator `exit` and seals the current block at the new
/// end of the instruction stream.
fn close_block(&mut self, exit: Nid) {
    match self.nodes[exit].kind {
        // Loop/Region entries get their backref from `add_block` instead,
        // so they are pushed without recording an instruction index
        Kind::Loop | Kind::Region => self.func.instrs.push(exit),
        _ => self.add_instr(exit),
    }
    let end = self.func.instrs.len() as _;
    self.func.blocks.last_mut().unwrap().end = end;
}
/// Appends `nid` to the instruction stream and remembers its slot index.
fn add_instr(&mut self, nid: Nid) {
    debug_assert_ne!(self.nodes[nid].kind, Kind::Loop);
    let slot = self.func.instrs.len() as u16;
    self.func.backrefs[nid as usize] = slot;
    self.func.instrs.push(nid);
}
/// Recursively schedules `nid` and its CFG successors into blocks.
///
/// A Loop is entered on its first visit (the back edge merely closes the
/// current block); a Region is entered only on its second visit (once both
/// predecessors were emitted). Plain data nodes are appended to the
/// current block.
fn emit_node(&mut self, nid: Nid) {
    if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
        match (self.nodes[nid].kind, self.func.visited.set(nid)) {
            // second visit of a Loop (back edge) or first visit of a
            // Region: just terminate the current block here
            (Kind::Loop, false) | (Kind::Region, true) => {
                self.close_block(nid);
                return;
            }
            _ => {}
        }
    } else if !self.func.visited.set(nid) {
        return;
    }

    if self.nodes.is_never_used(nid, self.tys) {
        // lock marks the node as materialized in place (needs no register)
        self.nodes.lock(nid);
        return;
    }

    let node = &self.nodes[nid];
    match node.kind {
        Kind::Start => {
            debug_assert_matches!(self.nodes[node.outputs[0]].kind, Kind::Entry);
            self.add_block(VOID);
            self.emit_node(node.outputs[0])
        }
        Kind::If => {
            let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
            let &[mut then, mut else_] = node.outputs.as_slice() else { unreachable!() };

            // branch polarity must match the instruction emitted later in
            // `emit_body_code` (fused compare vs. jne-on-nonzero)
            if let Some((_, swapped)) = self.nodes.cond_op(cnd) {
                if swapped {
                    mem::swap(&mut then, &mut else_);
                }
            } else {
                mem::swap(&mut then, &mut else_);
            }

            self.close_block(nid);
            self.emit_node(then);
            self.emit_node(else_);
        }
        Kind::Region | Kind::Loop => {
            self.close_block(nid);
            self.add_block(nid);
            for &o in node.outputs.iter().rev() {
                self.emit_node(o);
            }
        }
        Kind::Return { .. } | Kind::Die => {
            self.close_block(nid);
            self.emit_node(node.outputs[0]);
        }
        Kind::Entry => {
            let (ret, mut parama) = self.tys.parama(self.sig.ret);

            // returning by reference: schedule MEM to carry the out-pointer
            if let Some(PLoc::Ref(..)) = ret {
                self.add_instr(MEM);
            }

            // schedule every argument that actually occupies an ABI slot
            let mut typs = self.sig.args.args();
            #[expect(clippy::unnecessary_to_owned)]
            let mut args = self.nodes[VOID].outputs[ARG_START..].to_owned().into_iter();
            while let Some(ty) = typs.next_value(self.tys) {
                let arg = args.next().unwrap();
                debug_assert_eq!(self.nodes[arg].kind, Kind::Arg);
                match parama.next(ty, self.tys) {
                    None => {}
                    Some(_) => self.add_instr(arg),
                }
            }

            for &o in node.outputs.iter().rev() {
                self.emit_node(o);
            }
        }
        Kind::Then | Kind::Else => {
            self.add_block(nid);
            for &o in node.outputs.iter().rev() {
                self.emit_node(o);
            }
        }
        Kind::Call { func, unreachable, .. } => {
            // any non-ECA call disqualifies the tail-function optimization
            self.tail &= func == ty::Func::ECA;

            if unreachable {
                self.close_block(nid);
                self.emit_node(node.outputs[0]);
            } else {
                self.add_instr(nid);
                for &o in node.outputs.iter().rev() {
                    // only descend into direct control successors
                    if self.nodes[o].inputs[0] == nid
                        || (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
                            && self.nodes[o].inputs[1] == nid)
                    {
                        self.emit_node(o);
                    }
                }
            }
        }
        // hard zeros live in the zero register; nothing to schedule
        Kind::CInt { value: 0 } if self.nodes.is_hard_zero(nid) => {}
        Kind::CInt { .. }
        | Kind::BinOp { .. }
        | Kind::UnOp { .. }
        | Kind::Global { .. }
        | Kind::Load { .. }
        | Kind::Stre
        | Kind::RetVal
        | Kind::Stck => self.add_instr(nid),
        Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {}
        Kind::Assert { .. } => unreachable!(),
    }
}
}
impl Nodes {
/// Number of virtual-register slots the allocator needs; node ids are used
/// directly as vreg indices, so this is simply the node count.
fn vreg_count(&self) -> usize {
    self.len()
}
/// Decides whether `user` consuming `val` is a data dependency, i.e. `val`
/// must be live in a register when `user` executes (as opposed to a pure
/// control or memory-ordering edge).
pub fn is_data_dep(&self, val: Nid, user: Nid, #[expect(unused)] types: &Types) -> bool {
    match self[user].kind {
        // only the returned value (input 1) is a data input of a return
        Kind::Return { .. } => self[user].inputs[1] == val,
        // most CFG nodes consume only control; calls and ifs also take data
        _ if self.is_cfg(user) && !matches!(self[user].kind, Kind::Call { .. } | Kind::If) => {
            false
        }
        Kind::Join => false,
        Kind::Stre => {
            // inputs[4..] may only hold loads (anti-dependencies)
            debug_assert_eq!(
                self[user].inputs[4..]
                    .iter()
                    .filter(|&&v| self[v].kind != Kind::Load)
                    .copied()
                    .collect::<Vec<_>>(),
                vec![]
            );
            // input 3 is the memory chain predecessor
            debug_assert_matches!(
                self[self[user].inputs[3]].kind,
                Kind::Stre | Kind::Mem | Kind::Phi | Kind::Join
            );
            // value (1) and address (2) are data; the chain (3+) is not
            self[user].inputs.iter().position(|&v| v == val).is_some_and(|v| v < 3)
        }
        // input 2 of a load is its memory dependency, not a value
        Kind::Load => self[user].inputs[2] != val,
        // otherwise: data iff it occupies any non-control input slot
        _ => self[user].inputs[0] != val || self[user].inputs[1..].contains(&val),
    }
}
/// Resolves the basic-block entry node in which `uinst` uses `inst`,
/// walking up immediate dominators until a block-starting node is reached.
fn use_block_of(&self, inst: Nid, uinst: Nid) -> Nid {
    let mut cursor = self.use_block(inst, uinst, None);
    loop {
        if self[cursor].kind.starts_basic_block() {
            break cursor;
        }
        cursor = self.idom(cursor, None);
    }
}
/// Iterates `[phi, input_1, input_2]` triples for the data phis hanging
/// off a Region/Loop entry `nid`; yields nothing for any other node kind.
fn phi_inputs_of(&self, nid: Nid) -> impl Iterator<Item = [Nid; 3]> + use<'_> {
    // both match arms must produce the same concrete iterator type, hence
    // the `Option -> into_iter -> flatten` construction
    match self[nid].kind {
        Kind::Region | Kind::Loop => Some({
            self[nid]
                .outputs
                .as_slice()
                .iter()
                .filter(|&&n| self[n].is_data_phi())
                .map(|&n| [n, self[n].inputs[1], self[n].inputs[2]])
        })
        .into_iter()
        .flatten(),
        _ => None.into_iter().flatten(),
    }
}
/// Walks immediate dominators upward from `nid` until the entry node of
/// its enclosing basic block is found.
fn idom_of(&self, mut nid: Nid) -> Nid {
    loop {
        if self[nid].kind.starts_basic_block() {
            return nid;
        }
        nid = self.idom(nid, None);
    }
}
/// Collects into `buf` all transitive unlocked users of `nid` as
/// `(use_block_entry, user, reg_hint)` triples, looking *through* locked
/// (materialized-in-place) users via the work `stack`.
fn uses_of(
    &self,
    nid: Nid,
    types: &Types,
    stack: &mut Vec<Nid>,
    buf: &mut Vec<(Nid, Nid, Reg)>,
) {
    debug_assert!(stack.is_empty());
    debug_assert!(buf.is_empty());
    // control nodes (other than calls) produce no value, hence no uses
    if self[nid].kind.is_cfg() && !matches!(self[nid].kind, Kind::Call { .. }) {
        return;
    }
    stack.push(nid);
    while let Some(exp) = stack.pop() {
        for &o in self[exp].outputs.iter() {
            if !self.is_data_dep(exp, o, types) {
                continue;
            }
            if self.is_unlocked(o) {
                buf.push((self.use_block_of(exp, o), o, self.use_reg_of(exp, o)));
            } else {
                // a locked user forwards the liveness on to its own users
                stack.push(o);
            }
        }
    }
}
/// Definition-site register hint: the ABI argument register for `Arg`
/// nodes, 255 (= no preference) for everything else. Currently unused.
#[expect(unused)]
fn init_loc_of(&self, def: Nid, types: &Types) -> Reg {
    if self[def].kind == Kind::Arg {
        // replay parameter allocation to find this argument's register
        let mut parama = ParamAlloc(0..11);
        let (_, ploc) = self[VOID]
            .outputs
            .iter()
            .skip(ARG_START)
            .map(|&n| (n, parama.next(self[n].ty, types)))
            .find(|&(n, _)| n == def)
            .unwrap();
        return ploc.unwrap().reg();
    }
    255
}
/// Use-site register hint; currently a stub that always returns 255
/// (= no preference).
#[expect(unused)]
fn use_reg_of(&self, def: Nid, usage: Nid) -> Reg {
    //if matches!(self[usage].kind, Kind::Return { .. }) {}
    255
}
}
/// Bundle-based register allocator operating on the block schedule
/// produced by `FunctionBuilder`.
struct Regalloc<'a> {
    nodes: &'a Nodes,
    tys: &'a Types,
    res: &'a mut Res, // holds the schedule; receives the allocation
}
impl<'a> Regalloc<'a> {
/// Instruction-stream index of `nid`, or `None` for phis and locked
/// (materialized-in-place) nodes, which occupy no slot of their own.
fn instr_of(&self, nid: Nid) -> Option<Nid> {
    let node = &self.nodes[nid];
    if node.kind == Kind::Phi || self.nodes.is_locked(nid) {
        return None;
    }
    let slot = self.res.backrefs[nid as usize];
    debug_assert_ne!(slot, Nid::MAX, "{:?}", self.nodes[nid]);
    Some(slot)
}
/// Index (into `res.blocks`) of the block whose entry node is `nid`.
fn block_of(&self, nid: Nid) -> Nid {
    debug_assert!(self.nodes[nid].kind.starts_basic_block());
    self.res.backrefs[nid as usize]
}
/// Entry point: allocates registers for the scheduled function in `res`.
/// `special_count` (the number of reserved machine registers) is currently
/// unused here; the caller applies the register offset afterwards.
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res, special_count: usize) {
    Self { nodes: ctx, tys, res }.run_low(special_count);
}
/// Performs the allocation:
/// 1. records which instruction slots are calls (`call_set`),
/// 2. allocates phis first (blocks walked in reverse) so that phi inputs
///    can be coalesced into the phi's own bundle,
/// 3. allocates every remaining value-producing instruction first-fit.
fn run_low(&mut self, #[expect(unused)] special_count: usize) {
    self.res.general_bundles.clear();
    self.res.node_to_reg.clear();
    #[cfg(debug_assertions)]
    self.res.marked.clear();
    self.res.node_to_reg.resize(self.nodes.vreg_count(), 0);

    self.res.call_set.clear();
    for (i, &instr) in self.res.instrs.iter().enumerate() {
        if self.nodes[instr].kind.is_call() {
            self.res.call_set.add_one(i);
        }
    }

    debug_assert!(self.res.dfs_buf.is_empty());

    let mut uses_buf = Vec::new();
    let mut range_buf = Vec::new();
    let mut bundle = Bundle::default();
    self.res.visited.clear(self.nodes.len());
    // phis and their inputs go first: inputs prefer the phi's bundle, so
    // the parallel-move resolution at Region/Loop entries becomes a no-op
    for i in (0..self.res.blocks.len()).rev() {
        for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
            if self.res.visited.set(a) {
                self.append_bundle(a, &mut bundle, None, &mut uses_buf, &mut range_buf);
            }

            for r in rest {
                if !self.res.visited.set(r) {
                    continue;
                }

                self.append_bundle(
                    r,
                    &mut bundle,
                    // prefer coalescing into the phi's bundle
                    Some(self.res.node_to_reg[a as usize] as usize - 1),
                    &mut uses_buf,
                    &mut range_buf,
                );
            }
        }
    }

    let instrs = mem::take(&mut self.res.instrs);
    for &inst in &instrs {
        if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 {
            continue;
        }
        self.append_bundle(inst, &mut bundle, None, &mut uses_buf, &mut range_buf);
    }
    self.res.instrs = instrs;
}
/// Computes the live ranges of `inst` (one per CFG path from each use back
/// to the defining block) and merges them into the first non-overlapping
/// bundle, preferring `prefered` when given (phi coalescing). `tmp` is a
/// scratch bundle reused across calls.
fn append_bundle(
    &mut self,
    inst: Nid,
    tmp: &mut Bundle,
    prefered: Option<usize>,
    uses_buf: &mut Vec<(Nid, Nid, Reg)>,
    range_buf: &mut Vec<Range<usize>>,
) {
    let dom = self.nodes.idom_of(inst);
    self.res.dfs_seem.clear(self.nodes.len());
    self.nodes.uses_of(inst, self.tys, &mut self.res.dfs_buf, uses_buf);
    // NOTE(review): `prefered_reg` is accumulated below but never read
    // after the loop (`use_reg_of` always returns 255); it looks like a
    // stub for use-site register hints — confirm before removing.
    let mut prefered_reg = reg::ZERO;
    for (cursor, uinst, reg) in uses_buf.drain(..) {
        prefered_reg = prefered_reg.min(reg);
        if !self.res.dfs_seem.set(uinst) {
            continue;
        }
        #[cfg(debug_assertions)]
        debug_assert!(self.res.marked.insert((inst, uinst)));
        // walk the CFG backwards from the use's block to the defining
        // block, clipping each visited block's range to [def+1, use+1)
        self.reverse_cfg_dfs(cursor, dom, |s, n, b| {
            let mut range = b.range();
            debug_assert!(range.start < range.end);
            range.start = range.start.max(s.instr_of(inst).map_or(0, |n| n + 1) as usize);
            debug_assert!(
                range.start < range.end,
                "{:?} {:?} {n} {inst}",
                range,
                self.nodes[inst]
            );
            let new = range.end.min(
                s.instr_of(uinst)
                    .filter(|_| {
                        // the range may end at the use only when the use is
                        // in this very block and at the same loop depth as
                        // the definition (otherwise stay live to block end)
                        n == cursor
                            && self.nodes.loop_depth(dom, None)
                                == self.nodes.loop_depth(cursor, None)
                    })
                    .map_or(Nid::MAX, |n| n + 1) as usize,
            );
            range.end = new;
            debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range);
            range_buf.push(range)
        });

        // fuse adjacent ranges (sorted by start; dedup merges b into a)
        range_buf.sort_unstable_by_key(|r| r.start);
        range_buf.dedup_by(|a, b| {
            if b.end == a.start {
                b.end = a.end;
                true
            } else {
                false
            }
        });
        for range in range_buf.drain(..) {
            tmp.add(range);
        }
    }

    if tmp.is_empty() {
        // no uses at all: mark the value as dead (maps to the zero reg)
        self.res.node_to_reg[inst as usize] = u8::MAX;
        return;
    }

    if let Some(prefered) = prefered
        && !self.res.general_bundles[prefered].overlaps(tmp)
    {
        self.res.general_bundles[prefered].merge(tmp);
        tmp.clear();
        self.res.node_to_reg[inst as usize] = prefered as Reg + 1;
        return;
    }

    // first-fit over existing bundles; open a new one if none fits
    match self.res.general_bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
        Some((i, other)) => {
            other.merge(tmp);
            tmp.clear();
            self.res.node_to_reg[inst as usize] = i as Reg + 1;
        }
        None => {
            self.res.general_bundles.push(tmp.take());
            self.res.node_to_reg[inst as usize] = self.res.general_bundles.len() as Reg;
        }
    }
}
/// Depth-first walk over basic blocks backwards through the CFG, from the
/// block of `from` up to (and including) `until`, which must dominate
/// `from`. Invokes `each` once per visited block; `dfs_seem` deduplicates
/// visits across predecessor fan-in.
fn reverse_cfg_dfs(
    &mut self,
    from: Nid,
    until: Nid,
    mut each: impl FnMut(&mut Self, Nid, Block),
) {
    debug_assert!(self.res.dfs_buf.is_empty());
    self.res.dfs_buf.push(from);

    debug_assert!(self.nodes.dominates(until, from, None));

    while let Some(nid) = self.res.dfs_buf.pop() {
        debug_assert!(
            self.nodes.dominates(until, nid, None),
            "{until} {:?}",
            self.nodes[until]
        );
        each(self, nid, self.res.blocks[self.block_of(nid) as usize]);
        if nid == until {
            // never walk past the dominating block
            continue;
        }
        match self.nodes[nid].kind {
            Kind::Then | Kind::Else | Kind::Region | Kind::Loop => {
                for &n in self.nodes[nid].inputs.iter() {
                    // skip loop back edges
                    if self.nodes[n].kind == Kind::Loops {
                        continue;
                    }
                    let d = self.nodes.idom_of(n);
                    if self.res.dfs_seem.set(d) {
                        self.res.dfs_buf.push(d);
                    }
                }
            }
            Kind::Start => {}
            _ => unreachable!(),
        }
    }
}
}
/// Reusable scratch state shared between scheduling and register
/// allocation; kept on the backend so per-function allocations are reused.
#[derive(Default)]
pub(super) struct Res {
    blocks: Vec<Block>,           // basic blocks, in emission order
    instrs: Vec<Nid>,             // flat schedule, partitioned by `blocks`
    backrefs: Vec<u16>,           // node -> instr index (block index for entries)
    general_bundles: Vec<Bundle>, // one bundle per allocated register
    call_set: Bundle,             // instruction slots that are calls
    node_to_reg: Vec<Reg>,        // node -> 1-based bundle id (0 = none, MAX = dead)
    visited: BitSet,
    dfs_buf: Vec<Nid>,            // work stack for CFG walks
    dfs_seem: BitSet,             // dedup set for CFG walks
    #[cfg(debug_assertions)]
    // (def, use) pairs the allocator accounted for; checked during emission
    marked: hashbrown::HashSet<(Nid, Nid), crate::FnvBuildHasher>,
}
/// A set of live instruction slots stored as a bit set over whole
/// `BitSet::UNIT`-sized units; `start`/`end` are *unit* indices delimiting
/// the occupied portion of `usage`.
struct Bundle {
    start: usize, // first occupied unit (usize::MAX while empty)
    end: usize,   // one past the last occupied unit (0 while empty)
    usage: BitSet,
}
impl Default for Bundle {
    /// An empty bundle: inverted sentinel bounds so the first `add` or
    /// `merge` initializes both `start` and `end`.
    fn default() -> Self {
        Self { start: usize::MAX, end: 0, usage: Default::default() }
    }
}
impl Bundle {
    /// Marks the instruction-index `range` as live in this bundle, growing
    /// the unit bounds as needed. `range` is in instruction slots while
    /// `start`/`end` are in `BitSet::UNIT`-sized units.
    fn add(&mut self, range: Range<usize>) {
        debug_assert!(!range.is_empty());
        // ranges are only ever added at or after the current start
        debug_assert!(range.start / BitSet::UNIT >= self.start || self.start == usize::MAX);
        self.start = self.start.min(range.start / BitSet::UNIT);
        self.end = self.end.max(range.end.div_ceil(BitSet::UNIT));
        // project into the bit set, which is stored relative to `start`
        let proj_range =
            range.start - self.start * BitSet::UNIT..range.end - self.start * BitSet::UNIT;
        self.usage.set_range(proj_range)
    }

    /// Returns true when the two bundles share at least one live slot;
    /// only the overlapping unit window is compared.
    fn overlaps(&self, othr: &Self) -> bool {
        let overlap = self.start.max(othr.start)..self.end.min(othr.end);
        if overlap.start >= overlap.end {
            return false;
        }
        let [mut sslot, mut oslot] = [0, 0];
        let sunits =
            &self.usage.units(&mut sslot)[overlap.start - self.start..overlap.end - self.start];
        let ounits =
            &othr.usage.units(&mut oslot)[overlap.start - othr.start..overlap.end - othr.start];
        debug_assert_eq!(sunits.len(), ounits.len());
        // fixed: return the comparison directly instead of binding it to a
        // needless temporary (clippy::let_and_return)
        sunits.iter().zip(ounits).any(|(a, b)| (a & b) != 0)
    }

    /// Ors `othr`'s live slots into `self`. The bundles must not overlap
    /// and `self` must start at or before `othr` (or be empty).
    fn merge(&mut self, othr: &Self) {
        debug_assert!(!self.overlaps(othr));
        debug_assert!(self.start <= othr.start || self.start == usize::MAX);
        self.usage.reserve((othr.end - self.start) * BitSet::UNIT);
        self.start = self.start.min(othr.start);
        self.end = self.end.max(othr.end);
        let sunits =
            &mut self.usage.units_mut().unwrap()[othr.start - self.start..othr.end - self.start];
        let mut oslot = 0;
        let ounits = othr.usage.units(&mut oslot);
        sunits.iter_mut().zip(ounits).for_each(|(a, b)| *a |= *b);
    }

    /// Resets to the empty state while keeping the backing allocation.
    fn clear(&mut self) {
        self.start = usize::MAX;
        self.end = 0;
        self.usage.clear_as_is();
    }

    /// True when no slot was ever added (`end` only grows from 0 on `add`).
    fn is_empty(&self) -> bool {
        self.end == 0
    }

    /// Moves the contents into a fresh zero-based bundle and clears `self`
    /// for reuse as scratch space.
    fn take(&mut self) -> Self {
        let mut new = Self { start: 0, ..Self::default() };
        new.merge(self);
        self.clear();
        new
    }

    /// Marks the single instruction slot `i` as live.
    fn add_one(&mut self, i: usize) {
        self.start = self.start.min(i / BitSet::UNIT);
        self.end = self.end.max(i.div_ceil(BitSet::UNIT));
        self.usage.set(i as _);
    }
}
/// A scheduled basic block: the half-open slice [`start`, `end`) of
/// `Res::instrs`, plus the CFG node that begins the block.
#[derive(Clone, Copy)]
struct Block {
    start: u16,
    end: u16,
    entry: Nid,
}
impl Block {
    /// Half-open range of indices into `Res::instrs` owned by this block.
    pub fn range(&self) -> Range<usize> {
        usize::from(self.start)..usize::from(self.end)
    }
}