POC for removing needless stack offset computations when only the value is used

TBD: there are far more cases where this will apply

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
Jakub Doka 2024-12-15 17:17:41 +01:00
parent 00f6729d31
commit 48a0c8d0b9
9 changed files with 314 additions and 417 deletions
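The core idea, sketched against the first updated test expectation below: when a stack slot's address is only ever used as the base of loads and stores, the ADDI64 that materialized that address into a scratch register is dead weight, and the known offset can instead be folded straight into the memory instruction against r254 (the stack pointer). The register numbers and offsets here are lifted from that listing purely as an illustration, not as the general rule.

Before:
    ADDI64 r14, r254, 0d
    LD r3, r14, 0a, 16h

After:
    LD r3, r254, 0a, 16h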

View file

@ -3,8 +3,8 @@ use {
crate::{
lexer::TokenKind,
parser,
son::{Kind, Nid, Node, Nodes, MEM, VOID},
ty::{self, Arg, Loc, Module, Offset, Sig, Size, Types},
son::{Kind, Nid, Nodes, MEM},
ty::{self, Loc, Module, Offset, Size, Types},
utils::{EntSlice, EntVec},
},
alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
@ -417,7 +417,7 @@ impl Nodes {
Kind::BinOp { op: TokenKind::Add | TokenKind::Sub } => {
self.is_locked(node.inputs[1])
|| (self.is_const(node.inputs[2])
&& node.outputs.iter().all(|&n| self[n].uses_direct_offset_of(nid, tys)))
&& node.outputs.iter().all(|&n| self.uses_direct_offset_of(n, nid, tys)))
}
Kind::BinOp { op } => {
op.cond_op(self[node.inputs[1]].ty).is_some()
@ -425,7 +425,7 @@ impl Nodes {
}
Kind::Stck if tys.size_of(node.ty) == 0 => true,
Kind::Stck | Kind::Arg => node.outputs.iter().all(|&n| {
self[n].uses_direct_offset_of(nid, tys)
self.uses_direct_offset_of(n, nid, tys)
|| (matches!(self[n].kind, Kind::BinOp { op: TokenKind::Add })
&& self.is_never_used(n, tys))
}),
@ -433,18 +433,17 @@ impl Nodes {
_ => false,
}
}
}
struct InstrCtx<'a> {
nid: Nid,
sig: Sig,
is_last_block: bool,
is_next_block: bool,
retl: Option<PLoc>,
allocs: &'a [u8],
nodes: &'a Nodes,
tys: &'a Types,
files: &'a EntSlice<Module, parser::Ast>,
fn uses_direct_offset_of(&self, user: Nid, target: Nid, tys: &Types) -> bool {
let node = &self[user];
((node.kind == Kind::Stre && node.inputs[2] == target)
|| (node.kind == Kind::Load && node.inputs[1] == target))
&& (node.ty.loc(tys) == Loc::Reg
// this means the struct is actually loaded into a register so no BMC needed
|| (node.kind == Kind::Load
&& !matches!(tys.parama(node.ty).0, Some(PLoc::Ref(..)))
&& node.outputs.iter().all(|&o| self[o].kind.is_call())))
}
}
impl HbvmBackend {
@ -477,253 +476,6 @@ impl HbvmBackend {
}
});
}
fn emit_instr(
&mut self,
InstrCtx {
nid,
sig,
is_last_block,
is_next_block,
allocs,
nodes,
tys,
files,
retl,
}: InstrCtx,
) {
let node = &nodes[nid];
match node.kind {
Kind::If => {
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
if let Some((op, swapped)) = nodes.cond_op(cnd) {
let &[lhs, rhs] = allocs else { unreachable!() };
let &[_, lh, rh] = nodes[cnd].inputs.as_slice() else { unreachable!() };
self.extend(nodes[lh].ty, nodes[lh].ty.extend(), lhs, tys, files);
self.extend(nodes[rh].ty, nodes[rh].ty.extend(), rhs, tys, files);
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[!swapped as usize], rel));
self.emit(op(lhs, rhs, 0));
} else {
debug_assert_eq!(nodes[node.outputs[0]].kind, Kind::Then);
self.extend(nodes[cnd].ty, nodes[cnd].ty.extend(), allocs[0], tys, files);
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[0], rel));
self.emit(instrs::jne(allocs[0], reg::ZERO, 0));
}
}
Kind::Loop | Kind::Region => {
if !is_next_block {
let rel = Reloc::new(self.code.len(), 1, 4);
self.jump_relocs.push((nid, rel));
self.emit(instrs::jmp(0));
}
}
Kind::Return { .. } => {
match retl {
Some(PLoc::Reg(r, size)) if sig.ret.loc(tys) == Loc::Stack => {
self.emit(instrs::ld(r, allocs[0], 0, size))
}
None | Some(PLoc::Reg(..)) => {}
Some(PLoc::WideReg(r, size)) => self.emit(instrs::ld(r, allocs[0], 0, size)),
Some(PLoc::Ref(_, size)) => {
let [src, dst] = [allocs[0], allocs[1]];
if let Ok(size) = u16::try_from(size) {
self.emit(instrs::bmc(src, dst, size));
} else {
for _ in 0..size / u16::MAX as u32 {
self.emit(instrs::bmc(src, dst, u16::MAX));
self.emit(instrs::addi64(src, src, u16::MAX as _));
self.emit(instrs::addi64(dst, dst, u16::MAX as _));
}
self.emit(instrs::bmc(src, dst, size as u16));
self.emit(instrs::addi64(src, src, size.wrapping_neg() as _));
self.emit(instrs::addi64(dst, dst, size.wrapping_neg() as _));
}
}
}
if !is_last_block {
let rel = Reloc::new(self.code.len(), 1, 4);
self.ret_relocs.push(rel);
self.emit(instrs::jmp(0));
}
}
Kind::Die => {
self.emit(instrs::un());
}
Kind::CInt { value: 0 } => self.emit(instrs::cp(allocs[0], reg::ZERO)),
Kind::CInt { value } if node.ty == ty::Id::F32 => {
self.emit(instrs::li32(allocs[0], (f64::from_bits(value as _) as f32).to_bits()));
}
Kind::CInt { value } => self.emit(match tys.size_of(node.ty) {
1 => instrs::li8(allocs[0], value as _),
2 => instrs::li16(allocs[0], value as _),
4 => instrs::li32(allocs[0], value as _),
_ => instrs::li64(allocs[0], value as _),
}),
Kind::UnOp { op } => {
let op = op
.unop(
node.ty,
tys.inner_of(nodes[node.inputs[1]].ty).unwrap_or(nodes[node.inputs[1]].ty),
)
.unwrap_or_else(|| {
panic!(
"TODO: unary operator not supported: {op} {} {}",
ty::Display::new(tys, files, node.ty),
ty::Display::new(
tys,
files,
tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty)
)
)
});
let &[dst, oper] = allocs else { unreachable!() };
self.emit(op(dst, oper));
}
Kind::BinOp { op } => {
let &[.., rh] = node.inputs.as_slice() else { unreachable!() };
if let Kind::CInt { value } = nodes[rh].kind
&& nodes.is_locked(rh)
&& let Some(op) = op.imm_binop(node.ty)
{
let &[dst, lhs] = allocs else { unreachable!() };
self.emit(op(dst, lhs, value as _));
} else if let Some(against) = op.cmp_against() {
let op_ty = nodes[rh].ty;
let &[dst, lhs, rhs] = allocs else { unreachable!() };
if let Some(op) = op.float_cmp(op_ty) {
self.emit(op(dst, lhs, rhs));
} else if op_ty.is_float() && matches!(op, TokenKind::Le | TokenKind::Ge) {
let op = match op {
TokenKind::Le => TokenKind::Gt,
TokenKind::Ge => TokenKind::Lt,
_ => unreachable!(),
};
let op_fn = op.float_cmp(op_ty).unwrap();
self.emit(op_fn(dst, lhs, rhs));
self.emit(instrs::not(dst, dst));
} else {
let op_fn = if op_ty.is_signed() { instrs::cmps } else { instrs::cmpu };
self.emit(op_fn(dst, lhs, rhs));
self.emit(instrs::cmpui(dst, dst, against));
if matches!(op, TokenKind::Eq | TokenKind::Lt | TokenKind::Gt) {
self.emit(instrs::not(dst, dst));
}
}
} else if let Some(op) = op.binop(node.ty) {
let &[dst, lhs, rhs] = allocs else { unreachable!() };
self.emit(op(dst, lhs, rhs));
} else {
todo!("unhandled operator: {op}");
}
}
Kind::Call { args, func } => {
let (ret, mut parama) = tys.parama(node.ty);
let has_ret = ret.is_some() as usize;
let mut args = args.args();
let mut allocs = allocs[has_ret..].iter();
while let Some(arg) = args.next(tys) {
let Arg::Value(ty) = arg else { continue };
let Some(loc) = parama.next(ty, tys) else { continue };
let &arg = allocs.next().unwrap();
let (rg, size) = match loc {
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
PLoc::WideReg(rg, size) => (rg, size),
PLoc::Ref(..) | PLoc::Reg(..) => continue,
};
if size > 8 {
allocs.next().unwrap();
}
self.emit(instrs::ld(rg, arg, 0, size));
}
debug_assert!(!matches!(ret, Some(PLoc::Ref(..))) || allocs.next().is_some());
if func == ty::Func::ECA {
self.emit(instrs::eca());
} else {
self.relocs.push(TypedReloc {
target: func.into(),
reloc: Reloc::new(self.code.len(), 3, 4),
});
self.emit(instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
}
if node.ty.loc(tys) == Loc::Stack
&& let Some(PLoc::Reg(r, size) | PLoc::WideReg(r, size)) = ret
{
self.emit(instrs::st(r, *allocs.last().unwrap(), 0, size));
}
}
Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4);
self.relocs.push(TypedReloc { target: global.into(), reloc });
self.emit(instrs::lra(allocs[0], 0, 0));
}
Kind::Stck => {
let base = reg::STACK_PTR;
let offset = self.offsets[nid as usize];
self.emit(instrs::addi64(allocs[0], base, offset as _));
}
Kind::Load => {
let (region, offset) = nodes.strip_offset(node.inputs[1], node.ty, tys);
let size = tys.size_of(node.ty);
if node.ty.loc(tys) != Loc::Stack {
let (base, offset) = match nodes[region].kind {
Kind::Stck => (reg::STACK_PTR, self.offsets[region as usize] + offset),
_ => (allocs[1], offset),
};
self.emit(instrs::ld(allocs[0], base, offset as _, size as _));
}
}
Kind::Stre if node.inputs[1] == VOID => {}
Kind::Stre => {
let (region, offset) = nodes.strip_offset(node.inputs[2], node.ty, tys);
let size = u16::try_from(tys.size_of(node.ty)).expect("TODO");
let (base, offset, src) = match nodes[region].kind {
Kind::Stck if node.ty.loc(tys) == Loc::Reg => {
(reg::STACK_PTR, self.offsets[region as usize] + offset, allocs[0])
}
_ => ((allocs[0]), offset, allocs[1]),
};
match node.ty.loc(tys) {
Loc::Reg => self.emit(instrs::st(src, base, offset as _, size)),
Loc::Stack => {
debug_assert_eq!(offset, 0);
self.emit(instrs::bmc(src, base, size))
}
}
}
e @ (Kind::Start
| Kind::Assert { .. }
| Kind::Entry
| Kind::Mem
| Kind::End
| Kind::Loops
| Kind::Then
| Kind::Else
| Kind::Phi
| Kind::Arg
| Kind::Join) => unreachable!("{e:?}"),
}
}
}
impl Node {
fn uses_direct_offset_of(&self, nid: Nid, tys: &Types) -> bool {
((self.kind == Kind::Stre && self.inputs[2] == nid)
|| (self.kind == Kind::Load && self.inputs[1] == nid))
&& self.ty.loc(tys) == Loc::Reg
}
}
type CondRet = Option<(fn(u8, u8, i16) -> EncodedInstr, bool)>;

View file

@ -2,15 +2,16 @@ use {
crate::{
backend::hbvm::{
reg::{self, Reg},
HbvmBackend, Nid, Nodes, PLoc,
HbvmBackend, Nid, Nodes, PLoc, Reloc, TypedReloc,
},
lexer::TokenKind,
parser, quad_sort,
son::{Kind, ARG_START, MEM, VOID},
ty::{self, Arg, Loc, Module, Sig, Types},
ty::{self, Arg, Loc, Module, Offset, Sig, Types},
utils::{BitSet, EntSlice},
},
alloc::{borrow::ToOwned, vec::Vec},
core::{assert_matches::debug_assert_matches, mem, ops::Range},
core::{assert_matches::debug_assert_matches, mem, ops::Range, usize},
hbbytecode::{self as instrs},
};
@ -31,7 +32,7 @@ impl HbvmBackend {
let mut res = mem::take(&mut self.ralloc);
Regalloc::run(nodes, &mut res);
Regalloc::run(nodes, tys, &mut res);
'_open_function: {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
@ -106,8 +107,7 @@ impl HbvmBackend {
let node = &nodes[nid];
alloc_buf.clear();
let atr = |allc: Nid| {
let allc = strip_load(allc);
let assert_alloc_use = |allc: Nid| {
debug_assert!(
nodes.is_unlocked(allc),
"{:?} {}",
@ -125,18 +125,46 @@ impl HbvmBackend {
nodes[nid],
nodes[allc]
);
};
let atr = |allc: Nid| {
let allc = strip_load(allc);
assert_alloc_use(allc);
res.node_to_reg[allc as usize]
};
let mut is_next_block = false;
let offset_atr = |allc: Nid, offsets: &[Offset]| {
let allc = strip_load(allc);
if nodes.is_locked(allc) && nodes[allc].kind == Kind::Stck {
return (reg::STACK_PTR, offsets[allc as usize] as u64);
}
assert_alloc_use(allc);
(res.node_to_reg[allc as usize], 0)
};
match node.kind {
Kind::Mem => self.emit(instrs::cp(atr(MEM), reg::RET)),
Kind::Arg => {}
Kind::If => {
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
if nodes.cond_op(cnd).is_some() {
if let Some((op, swapped)) = nodes.cond_op(cnd) {
let &[_, lh, rh] = nodes[cnd].inputs.as_slice() else { unreachable!() };
alloc_buf.extend([atr(lh), atr(rh)]);
let [lhs, rhs] = [atr(lh), atr(rh)];
self.extend(nodes[lh].ty, nodes[lh].ty.extend(), lhs, tys, files);
self.extend(nodes[rh].ty, nodes[rh].ty.extend(), rhs, tys, files);
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[!swapped as usize], rel));
self.emit(op(lhs, rhs, 0));
} else {
alloc_buf.push(atr(cnd));
let cd = atr(cnd);
debug_assert_eq!(nodes[node.outputs[0]].kind, Kind::Then);
self.extend(nodes[cnd].ty, nodes[cnd].ty.extend(), cd, tys, files);
let rel = Reloc::new(self.code.len(), 3, 2);
self.jump_relocs.push((node.outputs[0], rel));
self.emit(instrs::jne(cd, reg::ZERO, 0));
}
}
Kind::Loop | Kind::Region => {
@ -217,39 +245,132 @@ impl HbvmBackend {
}
}
is_next_block = res.backrefs[nid as usize] as usize == i + 1;
if res.backrefs[nid as usize] as usize != i + 1 {
let rel = Reloc::new(self.code.len(), 1, 4);
self.jump_relocs.push((nid, rel));
self.emit(instrs::jmp(0));
}
}
Kind::Return { .. } => {
let &[_, ret, ..] = node.inputs.as_slice() else { unreachable!() };
match retl {
Some(PLoc::Reg(r, _)) if sig.ret.loc(tys) == Loc::Reg => {
None => {}
Some(PLoc::Reg(r, size)) if sig.ret.loc(tys) == Loc::Stack => {
// TODO: handle the stack load
self.emit(instrs::ld(r, atr(ret), 0, size))
}
Some(PLoc::WideReg(r, size)) => {
// TODO: handle the stack load
self.emit(instrs::ld(r, atr(ret), 0, size))
}
Some(PLoc::Reg(r, _)) => {
alloc_buf.push(atr(ret));
self.emit(instrs::cp(r, atr(ret)));
}
Some(PLoc::Ref(..)) => alloc_buf.extend([atr(ret), atr(MEM)]),
Some(_) => alloc_buf.push(atr(ret)),
None => {}
Some(PLoc::Ref(_, size)) => {
let [src, dst] = [atr(ret), atr(MEM)];
if let Ok(size) = u16::try_from(size) {
self.emit(instrs::bmc(src, dst, size));
} else {
for _ in 0..size / u16::MAX as u32 {
self.emit(instrs::bmc(src, dst, u16::MAX));
self.emit(instrs::addi64(src, src, u16::MAX as _));
self.emit(instrs::addi64(dst, dst, u16::MAX as _));
}
self.emit(instrs::bmc(src, dst, size as u16));
self.emit(instrs::addi64(src, src, size.wrapping_neg() as _));
self.emit(instrs::addi64(dst, dst, size.wrapping_neg() as _));
}
}
}
if i != res.blocks.len() - 1 {
let rel = Reloc::new(self.code.len(), 1, 4);
self.ret_relocs.push(rel);
self.emit(instrs::jmp(0));
}
}
Kind::Die => {}
Kind::CInt { .. } => alloc_buf.push(atr(nid)),
Kind::UnOp { .. } => alloc_buf.extend([atr(nid), atr(node.inputs[1])]),
Kind::Die => {
self.emit(instrs::un());
}
Kind::CInt { value: 0 } => self.emit(instrs::cp(atr(nid), reg::ZERO)),
Kind::CInt { value } if node.ty == ty::Id::F32 => {
self.emit(instrs::li32(
atr(nid),
(f64::from_bits(value as _) as f32).to_bits(),
));
}
Kind::CInt { value } => self.emit(match tys.size_of(node.ty) {
1 => instrs::li8(atr(nid), value as _),
2 => instrs::li16(atr(nid), value as _),
4 => instrs::li32(atr(nid), value as _),
_ => instrs::li64(atr(nid), value as _),
}),
Kind::UnOp { op } => {
let op = op
.unop(
node.ty,
tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty),
)
.unwrap_or_else(|| {
panic!(
"TODO: unary operator not supported: {op} {} {}",
ty::Display::new(tys, files, node.ty),
ty::Display::new(
tys,
files,
tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty)
)
)
});
self.emit(op(atr(nid), atr(node.inputs[1])));
}
Kind::BinOp { op } => {
let &[.., lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
if let Kind::CInt { .. } = nodes[rhs].kind
if let Kind::CInt { value } = nodes[rhs].kind
&& nodes.is_locked(rhs)
&& op.imm_binop(node.ty).is_some()
&& let Some(op) = op.imm_binop(node.ty)
{
alloc_buf.extend([atr(nid), atr(lhs)]);
self.emit(op(atr(nid), atr(lhs), value as _));
} else if let Some(against) = op.cmp_against() {
let op_ty = nodes[rhs].ty;
let [dst, lhs, rhs] = [atr(nid), atr(lhs), atr(rhs)];
if let Some(op) = op.float_cmp(op_ty) {
self.emit(op(dst, lhs, rhs));
} else if op_ty.is_float()
&& matches!(op, TokenKind::Le | TokenKind::Ge)
{
let op = match op {
TokenKind::Le => TokenKind::Gt,
TokenKind::Ge => TokenKind::Lt,
_ => unreachable!(),
};
let op_fn = op.float_cmp(op_ty).unwrap();
self.emit(op_fn(dst, lhs, rhs));
self.emit(instrs::not(dst, dst));
} else {
let op_fn =
if op_ty.is_signed() { instrs::cmps } else { instrs::cmpu };
self.emit(op_fn(dst, lhs, rhs));
self.emit(instrs::cmpui(dst, dst, against));
if matches!(op, TokenKind::Eq | TokenKind::Lt | TokenKind::Gt) {
self.emit(instrs::not(dst, dst));
}
}
} else if let Some(op) = op.binop(node.ty) {
let [dst, lhs, rhs] = [atr(nid), atr(lhs), atr(rhs)];
self.emit(op(dst, lhs, rhs));
} else {
alloc_buf.extend([atr(nid), atr(lhs), atr(rhs)]);
todo!("unhandled operator: {op}");
}
}
Kind::Call { args, .. } => {
Kind::Call { args, func } => {
let (ret, mut parama) = tys.parama(node.ty);
if ret.is_some() {
alloc_buf.push(atr(nid));
if let Some(PLoc::Ref(r, ..)) = ret {
self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap())))
}
let mut args = args.args();
let mut allocs = node.inputs[1..].iter();
@ -257,68 +378,90 @@ impl HbvmBackend {
let Arg::Value(ty) = arg else { continue };
let Some(loc) = parama.next(ty, tys) else { continue };
let arg = *allocs.next().unwrap();
alloc_buf.push(atr(arg));
match loc {
PLoc::Reg(..) if ty.loc(tys) == Loc::Stack => {}
PLoc::WideReg(..) => alloc_buf.push(0),
PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
self.emit(instrs::cp(r, atr(arg)))
let &arg = allocs.next().unwrap();
let (rg, size) = match loc {
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
PLoc::WideReg(rg, size) => (rg, size),
PLoc::Ref(r, ..) | PLoc::Reg(r, ..) => {
self.emit(instrs::cp(r, atr(arg)));
continue;
}
};
let (src, off) = offset_atr(arg, &self.offsets);
self.emit(instrs::ld(rg, src, off, size));
}
if node.ty.loc(tys) == Loc::Stack {
alloc_buf.push(atr(*node.inputs.last().unwrap()));
if func == ty::Func::ECA {
self.emit(instrs::eca());
} else {
self.relocs.push(TypedReloc {
target: func.into(),
reloc: Reloc::new(self.code.len(), 3, 4),
});
self.emit(instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
}
if let Some(PLoc::Ref(r, ..)) = ret {
self.emit(instrs::cp(r, *alloc_buf.last().unwrap()))
if node.ty.loc(tys) == Loc::Stack
&& let Some(PLoc::Reg(r, size) | PLoc::WideReg(r, size)) = ret
{
self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size));
}
}
Kind::Stck | Kind::Global { .. } => alloc_buf.push(atr(nid)),
Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4);
self.relocs.push(TypedReloc { target: global.into(), reloc });
self.emit(instrs::lra(atr(nid), 0, 0));
}
Kind::Stck => {
let base = reg::STACK_PTR;
let offset = self.offsets[nid as usize];
self.emit(instrs::addi64(atr(nid), base, offset as _));
}
Kind::Load => {
let (region, _) = nodes.strip_offset(node.inputs[1], node.ty, tys);
let (region, offset) = nodes.strip_offset(node.inputs[1], node.ty, tys);
let size = tys.size_of(node.ty);
if node.ty.loc(tys) != Loc::Stack {
alloc_buf.push(atr(nid));
match nodes[region].kind {
Kind::Stck => {}
_ => alloc_buf.push(atr(region)),
}
let (base, offset) = match nodes[region].kind {
Kind::Stck => {
(reg::STACK_PTR, self.offsets[region as usize] + offset)
}
_ => (atr(region), offset),
};
self.emit(instrs::ld(atr(nid), base, offset as _, size as _));
}
}
Kind::Stre if node.inputs[1] == VOID => {}
Kind::Stre => {
let (region, _) = nodes.strip_offset(node.inputs[2], node.ty, tys);
match nodes[region].kind {
Kind::Stck if node.ty.loc(tys) == Loc::Reg => {
alloc_buf.push(atr(node.inputs[1]))
debug_assert_ne!(node.inputs[1], VOID);
let (region, offset) = nodes.strip_offset(node.inputs[2], node.ty, tys);
let size = u16::try_from(tys.size_of(node.ty)).expect("TODO");
let (base, offset, src) = match nodes[region].kind {
Kind::Stck if node.ty.loc(tys) == Loc::Reg => (
reg::STACK_PTR,
self.offsets[region as usize] + offset,
atr(node.inputs[1]),
),
_ => (atr(region), offset, atr(node.inputs[1])),
};
match node.ty.loc(tys) {
Loc::Reg => self.emit(instrs::st(src, base, offset as _, size)),
Loc::Stack => {
debug_assert_eq!(offset, 0);
self.emit(instrs::bmc(src, base, size))
}
_ => alloc_buf.extend([atr(region), atr(node.inputs[1])]),
}
}
Kind::Mem => {
self.emit(instrs::cp(atr(MEM), reg::RET));
continue;
}
Kind::Arg => {
continue;
}
_ => {}
e @ (Kind::Start
| Kind::Assert { .. }
| Kind::Entry
| Kind::End
| Kind::Loops
| Kind::Then
| Kind::Else
| Kind::Phi
| Kind::Join) => unreachable!("{e:?}"),
}
self.emit_instr(super::InstrCtx {
nid,
sig,
is_next_block,
is_last_block: i == res.blocks.len() - 1,
retl,
allocs: &alloc_buf,
nodes,
tys,
files,
});
if let Kind::Call { .. } = node.kind {
let (ret, ..) = tys.parama(node.ty);
@ -537,6 +680,37 @@ impl Nodes {
self.len()
}
pub fn is_data_dep(&self, val: Nid, user: Nid, #[expect(unused)] types: &Types) -> bool {
match self[user].kind {
Kind::Return { .. } => self[user].inputs[1] == val,
_ if self.is_cfg(user) && !matches!(self[user].kind, Kind::Call { .. } | Kind::If) => {
false
}
Kind::Join => false,
Kind::Stre => {
debug_assert_eq!(
self[user].inputs[4..]
.iter()
.filter(|&&v| self[v].kind != Kind::Load)
.copied()
.collect::<Vec<_>>(),
vec![]
);
debug_assert_matches!(
self[self[user].inputs[3]].kind,
Kind::Stre | Kind::Mem | Kind::Phi
);
self[user].inputs.iter().position(|&v| v == val).is_some_and(|v| v < 3)
}
//Kind::Call { .. } => {
// self[val].kind != Kind::Load
// || matches!(types.parama(self[val].ty).0, Some(PLoc::Ref(..)))
//}
Kind::Load => self[user].inputs[2] != val,
_ => self[user].inputs[0] != val || self[user].inputs[1..].contains(&val),
}
}
fn use_block_of(&self, inst: Nid, uinst: Nid) -> Nid {
let mut block = self.use_block(inst, uinst, None);
while !self[block].kind.starts_basic_block() {
@ -568,7 +742,11 @@ impl Nodes {
nid
}
fn uses_of(&self, nid: Nid) -> impl Iterator<Item = (Nid, Nid)> + use<'_> {
fn uses_of<'a>(
&'a self,
nid: Nid,
types: &'a Types,
) -> impl Iterator<Item = (Nid, Nid)> + use<'a> {
if self[nid].kind.is_cfg() && !matches!(self[nid].kind, Kind::Call { .. }) {
return None.into_iter().flatten();
}
@ -577,10 +755,10 @@ impl Nodes {
self[nid]
.outputs
.iter()
.filter(move |&&n| self.is_data_dep(nid, n))
.filter(move |&&n| self.is_data_dep(nid, n, types))
.map(move |n| self.this_or_delegates(nid, n))
.flat_map(|(p, ls)| ls.iter().map(move |l| (p, l)))
.filter(|&(o, &n)| self.is_data_dep(o, n))
.filter(|&(o, &n)| self.is_data_dep(o, n, types))
.map(|(p, &n)| (self.use_block_of(p, n), n))
.inspect(|&(_, n)| debug_assert!(self.is_unlocked(n))),
)
@ -591,6 +769,7 @@ impl Nodes {
struct Regalloc<'a> {
nodes: &'a Nodes,
tys: &'a Types,
res: &'a mut Res,
}
@ -608,8 +787,8 @@ impl<'a> Regalloc<'a> {
self.res.backrefs[nid as usize]
}
fn run(ctx: &'a Nodes, res: &'a mut Res) {
Self { nodes: ctx, res }.run_low();
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res) {
Self { nodes: ctx, tys, res }.run_low();
}
fn run_low(&mut self) {
@ -657,7 +836,7 @@ impl<'a> Regalloc<'a> {
fn collect_bundle(&mut self, inst: Nid, into: &mut Bundle) {
let dom = self.nodes.idom_of(inst);
self.res.dfs_seem.clear(self.nodes.len());
for (cursor, uinst) in self.nodes.uses_of(inst) {
for (cursor, uinst) in self.nodes.uses_of(inst, self.tys) {
if !self.res.dfs_seem.set(uinst) {
continue;
}

View file

@ -1898,7 +1898,7 @@ impl Nodes {
log::info!("{out}");
}
fn is_cfg(&self, o: Nid) -> bool {
pub fn is_cfg(&self, o: Nid) -> bool {
self[o].kind.is_cfg()
}
@ -2054,33 +2054,6 @@ impl Nodes {
}
}
pub fn is_data_dep(&self, val: Nid, user: Nid) -> bool {
match self[user].kind {
Kind::Return { .. } => self[user].inputs[1] == val,
_ if self.is_cfg(user) && !matches!(self[user].kind, Kind::Call { .. } | Kind::If) => {
false
}
Kind::Join => false,
Kind::Stre => {
debug_assert_eq!(
self[user].inputs[4..]
.iter()
.filter(|&&v| self[v].kind != Kind::Load)
.copied()
.collect::<Vec<_>>(),
vec![]
);
debug_assert_matches!(
self[self[user].inputs[3]].kind,
Kind::Stre | Kind::Mem | Kind::Phi
);
self[user].inputs.iter().position(|&v| v == val).is_some_and(|v| v < 3)
}
Kind::Load => self[user].inputs[2] != val,
_ => self[user].inputs[0] != val || self[user].inputs[1..].contains(&val),
}
}
pub fn this_or_delegates<'a>(&'a self, source: Nid, target: &'a Nid) -> (Nid, &'a [Nid]) {
if self.is_unlocked(*target) {
(source, core::slice::from_ref(target))
@ -2193,7 +2166,7 @@ pub enum Kind {
}
impl Kind {
fn is_call(&self) -> bool {
pub fn is_call(&self) -> bool {
matches!(self, Kind::Call { .. })
}

View file

@ -1,22 +1,21 @@
main:
ADDI64 r254, r254, -16d
LI64 r13, 10d
ADDI64 r14, r254, 0d
ST r13, r254, 0a, 8h
LI64 r13, 20d
ST r13, r254, 8a, 8h
LI64 r13, 6d
LI64 r15, 5d
LI64 r16, 1d
CP r2, r16
CP r5, r15
LI64 r14, 5d
LI64 r15, 1d
CP r2, r15
LD r3, r254, 0a, 16h
CP r5, r14
CP r6, r13
LD r3, r14, 0a, 16h
ECA
CP r1, r0
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
ev: Ecall
code size: 154
code size: 143
ret: 0
status: Ok(())

View file

@ -1,29 +1,26 @@
main:
ADDI64 r254, r254, -122d
ST r31, r254, 58a, 64h
ADDI64 r254, r254, -98d
ST r31, r254, 58a, 40h
ADDI64 r32, r254, 33d
ADDI64 r33, r254, 34d
ADDI64 r34, r254, 1d
ADDI64 r35, r254, 17d
ST r32, r254, 34a, 8h
LI64 r36, 100d
ADDI64 r37, r254, 0d
LI8 r38, 1b
LI64 r34, 100d
LI8 r35, 1b
ST r0, r254, 1a, 8h
ST r0, r254, 17a, 8h
ST r36, r254, 42a, 8h
ST r38, r254, 0a, 1h
ST r34, r254, 42a, 8h
ST r35, r254, 0a, 1h
ST r0, r254, 9a, 8h
ST r0, r254, 25a, 8h
ST r36, r254, 50a, 8h
ST r34, r254, 50a, 8h
ST r0, r254, 33a, 1h
CP r2, r33
LD r3, r35, 0a, 16h
LD r5, r34, 0a, 16h
LD r7, r37, 0a, 1h
LD r3, r254, 17a, 16h
LD r5, r254, 1a, 16h
LD r7, r254, 0a, 1h
JAL r31, r0, :put_filled_rect
LD r31, r254, 58a, 64h
ADDI64 r254, r254, 122d
LD r31, r254, 58a, 40h
ADDI64 r254, r254, 98d
JALA r0, r31, 0a
put_filled_rect:
ADDI64 r254, r254, -108d
@ -99,6 +96,6 @@ put_filled_rect:
JMP :3
2: ADDI64 r254, r254, 108d
JALA r0, r31, 0a
code size: 875
code size: 842
ret: 0
status: Ok(())

View file

@ -13,14 +13,14 @@ main:
ADDI64 r254, r254, -120d
ST r31, r254, 72a, 48h
ADDI64 r32, r254, 48d
CP r2, r0
CP r1, r32
CP r2, r0
JAL r31, r0, :decide
ADDI64 r34, r254, 24d
BMC r32, r34, 24h
LI64 r35, 1d
CP r2, r35
CP r1, r34
CP r2, r35
JAL r31, r0, :decide
ADDI64 r36, r254, 0d
BMC r32, r36, 24h

View file

@ -1,21 +1,19 @@
main:
ADDI64 r254, r254, -72d
ST r31, r254, 32a, 40h
ADDI64 r32, r254, 8d
LRA r33, r0, :"Goodbye, World!\0"
ADDI64 r34, r254, 24d
LRA r35, r0, :"Hello, World!\0"
ST r33, r254, 8a, 8h
ST r35, r254, 24a, 8h
LD r2, r34, 0a, 8h
LD r3, r32, 0a, 8h
LRA r32, r0, :"Goodbye, World!\0"
LRA r33, r0, :"Hello, World!\0"
ST r32, r254, 8a, 8h
ST r33, r254, 24a, 8h
LD r2, r254, 24a, 8h
LD r3, r254, 8a, 8h
JAL r31, r0, :print
ADDI64 r32, r254, 0d
ADDI64 r34, r254, 16d
ST r33, r254, 0a, 8h
ST r35, r254, 16a, 8h
CP r2, r34
CP r3, r32
ADDI64 r34, r254, 0d
ADDI64 r35, r254, 16d
ST r32, r254, 0a, 8h
ST r33, r254, 16a, 8h
CP r2, r35
CP r3, r34
JAL r31, r0, :print2
LD r31, r254, 32a, 40h
ADDI64 r254, r254, 72d
@ -56,6 +54,6 @@ Hello, World!
Goodbye, World!
Hello, World!
Goodbye, World!
code size: 457
code size: 435
ret: 0
status: Ok(())

View file

@ -59,8 +59,8 @@ main:
CP r1, r32
JMP :3
10: LRA r36, r0, :"foo\0"
CP r4, r36
LD r2, r33, 0a, 16h
CP r4, r36
JAL r31, r0, :use_foo
ADDI64 r33, r254, 0d
JAL r31, r0, :no_foo
@ -78,8 +78,8 @@ main:
CP r1, r32
JMP :3
13: ADDI64 r35, r254, 16d
CP r2, r34
CP r1, r35
CP r2, r34
JAL r31, r0, :new_bar
JAL r31, r0, :decide
CP r34, r1

View file

@ -2,17 +2,16 @@ main:
ADDI64 r254, r254, -64d
ST r31, r254, 32a, 32h
LI64 r32, 4d
ADDI64 r33, r254, 16d
ST r32, r254, 16a, 8h
LI64 r32, 3d
ST r32, r254, 24a, 8h
ADDI64 r34, r254, 0d
LD r3, r33, 0a, 16h
ADDI64 r33, r254, 0d
LD r3, r254, 16a, 16h
JAL r31, r0, :odher_pass
ST r1, r34, 0a, 16h
LD r33, r254, 8a, 8h
JNE r33, r32, :0
CP r2, r34
ST r1, r33, 0a, 16h
LD r34, r254, 8a, 8h
JNE r34, r32, :0
CP r2, r33
JAL r31, r0, :pass
CP r32, r1
CP r1, r32
@ -34,6 +33,6 @@ pass:
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 313
code size: 302
ret: 4
status: Ok(())