#![feature(if_let_guard)]
#![feature(slice_take)]

use {
    core::panic,
    cranelift_codegen::{
        self as cc, CodegenError, Final, FinalizedMachReloc, MachBufferFinalized,
        ir::{
            self as cir, InstBuilder, MemFlags, TrapCode, UserExternalName,
            condcodes::{self, IntCC},
        },
        isa::{LookupError, TargetIsa},
        settings::{Configurable, SetError},
    },
    cranelift_frontend::{self as cf, FunctionBuilder},
    cranelift_module::{self as cm, Module, ModuleError},
    hblang::{
        lexer::TokenKind,
        nodes::{self as hbnodes},
        ty as hbty,
        utils::{self as hbutils, Ent, EntVec},
    },
    std::{
        collections::HashSet,
        fmt::{Display, Write},
        ops::Range,
    },
};

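/// Architecture-specific ABI handling; `build_signature` currently dispatches
/// only to the System V x86_64 lowering provided by this module.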
mod x86_64;

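/// Cranelift-backed code generator for hblang. Owns the reusable Cranelift
/// contexts, the object module being populated, and the per-function and
/// per-global metadata collected while bodies are emitted.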
pub struct Backend {
    ctx: cc::Context,
    dt_ctx: cm::DataDescription,
    fb_ctx: cf::FunctionBuilderContext,
    module: Option<cranelift_object::ObjectModule>,
    ctrl_plane: cc::control::ControlPlane,
    funcs: Functions,
    globals: EntVec<hbty::Global, Global>,
    asm: Assembler,
}

impl Backend {
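    /// Creates a backend for `triple`. `flags` is a comma-separated list of
    /// `key=value` Cranelift settings; unknown keys or values surface as
    /// [`BackendCreationError::InvalidFlag`].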
    pub fn new(triple: target_lexicon::Triple, flags: &str) -> Result<Self, BackendCreationError> {
        Ok(Self {
            ctx: cc::Context::new(),
            dt_ctx: cm::DataDescription::new(),
            fb_ctx: cf::FunctionBuilderContext::default(),
            ctrl_plane: cc::control::ControlPlane::default(),
            module: cranelift_object::ObjectModule::new(cranelift_object::ObjectBuilder::new(
                cc::isa::lookup(triple)?.finish(cc::settings::Flags::new({
                    let mut bl = cc::settings::builder();
                    for (k, v) in flags.split(',').filter_map(|s| s.split_once('=')) {
                        bl.set(k, v).map_err(|err| BackendCreationError::InvalidFlag {
                            key: k.to_owned(),
                            value: v.to_owned(),
                            err,
                        })?;
                    }
                    bl
                }))?,
                "main",
                cm::default_libcall_names(),
            )?)
            .into(),
            funcs: Default::default(),
            globals: Default::default(),
            asm: Default::default(),
        })
    }
}

impl hblang::backend::Backend for Backend {
    fn triple(&self) -> String {
        self.module.as_ref().unwrap().isa().triple().to_string()
    }

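    /// Walks the call graph from `from`, declaring every reachable function
    /// and global in the object module, then defines their bodies and data
    /// and writes the finished object file into `to`. The returned
    /// `AssemblySpec` is zeroed since the object file carries its own layout.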
    fn assemble_reachable(
        &mut self,
        from: hbty::Func,
        types: &hbty::Types,
        files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
        to: &mut Vec<u8>,
    ) -> hblang::backend::AssemblySpec {
        debug_assert!(self.asm.frontier.is_empty());
        debug_assert!(self.asm.funcs.is_empty());
        debug_assert!(self.asm.globals.is_empty());

        let mut module = self.module.take().expect("backend can assemble only once");

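        // Inverse of the naming scheme used while emitting bodies:
        // namespace 0 holds function indices, namespace 1 holds global indices.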
        fn clif_name_to_ty(name: UserExternalName) -> hbty::Id {
            match name.namespace {
                0 => hbty::Kind::Func(hbty::Func::new(name.index as _)),
                1 => hbty::Kind::Global(hbty::Global::new(name.index as _)),
                _ => unreachable!(),
            }
            .compress()
        }

        self.globals.shadow(types.ins.globals.len());

        let mut seen_names = HashSet::new();

        self.asm.frontier.push(from.into());
        while let Some(itm) = self.asm.frontier.pop() {
            match itm.expand() {
                hbty::Kind::Func(func) => {
                    let fd = &types.ins.funcs[func];
                    if fd.is_import {
                        self.funcs.headers.shadow(func.index() + 1);
                    }
                    let fuc = &mut self.funcs.headers[func];
                    let file = &files[fd.file];
                    if fuc.module_id.is_some() {
                        continue;
                    }
                    self.asm.frontier.extend(
                        fuc.external_names.clone().map(|r| {
                            clif_name_to_ty(self.funcs.external_names[r as usize].clone())
                        }),
                    );
                    self.asm.name.clear();
                    if func == from {
                        self.asm.name.push_str("main");
                    } else if fd.is_import {
                        self.asm.name.push_str(file.ident_str(fd.name));
                    } else {
                        self.asm.name.push_str(hblang::strip_cwd(&file.path));
                        self.asm.name.push('.');
                        if fd.parent != hbty::Id::from(fd.file) {
                            write!(
                                self.asm.name,
                                "{}",
                                hbty::Display::new(types, files, fd.parent)
                            )
                            .unwrap();
                        }
                        self.asm.name.push_str(file.ident_str(fd.name));
                        if fd.is_generic {
                            let mut args = fd.sig.args.args();
                            self.asm.name.push('(');
                            while let Some(arg) = args.next(types) {
                                let (hbty::Arg::Value(ty) | hbty::Arg::Type(ty)) = arg;
                                write!(self.asm.name, "{},", hbty::Display::new(types, files, ty))
                                    .unwrap();
                            }
                            debug_assert!(!self.asm.name.ends_with('('));
                            self.asm.name.pop().unwrap();
                            self.asm.name.push(')');
                        }
                    }
                    let linkage = if func == from {
                        cm::Linkage::Export
                    } else if fd.is_import {
                        cm::Linkage::Import
                    } else {
                        cm::Linkage::Local
                    };
                    build_signature(
                        module.isa().default_call_conv(),
                        fd.sig,
                        types,
                        &mut self.ctx.func.signature,
                        &mut vec![],
                    );
                    debug_assert!(seen_names.insert(self.asm.name.clone()), "{}", self.asm.name);
                    fuc.module_id = Some(
                        module
                            .declare_function(&self.asm.name, linkage, &self.ctx.func.signature)
                            .unwrap(),
                    );
                    if !fd.is_import {
                        self.asm.funcs.push(func);
                    }
                }
                hbty::Kind::Global(glob) => {
                    if self.globals[glob].module_id.is_some() {
                        continue;
                    }
                    self.asm.globals.push(glob);

                    self.asm.name.clear();
                    let mutable = if types.ins.globals[glob].file == Default::default() {
                        writeln!(self.asm.name, "anon{}", glob.index()).unwrap();
                        false
                    } else {
                        let file = &files[types.ins.globals[glob].file];
                        self.asm.name.push_str(hblang::strip_cwd(&file.path));
                        self.asm.name.push('.');
                        self.asm.name.push_str(file.ident_str(types.ins.globals[glob].name));
                        true
                    };

                    self.globals[glob].module_id = Some(
                        module
                            .declare_data(&self.asm.name, cm::Linkage::Local, mutable, false)
                            .unwrap(),
                    );
                }
                _ => unreachable!(),
            }
        }

        for &func in &self.asm.funcs {
            let fuc = &self.funcs.headers[func];
            assert!(!types.ins.funcs[func].is_import);
            debug_assert!(!fuc.code.is_empty());
            let names = &mut self.funcs.external_names
                [fuc.external_names.start as usize..fuc.external_names.end as usize];
            self.ctx.func.clear();
            names.iter().for_each(|nm| {
                let mut nm = nm.clone();
                if nm.namespace == 0 {
                    nm.index = self.funcs.headers[hbty::Func::new(nm.index as _)]
                        .module_id
                        .unwrap()
                        .as_u32();
                } else {
                    nm.index =
                        self.globals[hbty::Global::new(nm.index as _)].module_id.unwrap().as_u32();
                }
                let prev_len = self.ctx.func.params.user_named_funcs().len();
                self.ctx.func.params.ensure_user_func_name(nm.clone());
                debug_assert_ne!(self.ctx.func.params.user_named_funcs().len(), prev_len, "{}", nm);
            });
            module
                .define_function_bytes(
                    fuc.module_id.unwrap(),
                    &self.ctx.func,
                    fuc.alignment as _,
                    &self.funcs.code[fuc.code.start as usize..fuc.code.end as usize],
                    &self.funcs.relocs[fuc.relocs.start as usize..fuc.relocs.end as usize],
                )
                .unwrap();
        }

        for global in self.asm.globals.drain(..) {
            let glob = &self.globals[global];
            self.dt_ctx.clear();
            self.dt_ctx.define(types.ins.globals[global].data.clone().into());
            module.define_data(glob.module_id.unwrap(), &self.dt_ctx).unwrap();
        }

        module.finish().object.write_stream(to).unwrap();

        hblang::backend::AssemblySpec { code_length: 0, data_length: 0, entry: 0 }
    }

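    /// Disassembly of the emitted object code is not supported by this backend.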
    fn disasm<'a>(
        &'a self,
        _slice: &[u8],
        _eca_handler: &mut dyn FnMut(&mut &[u8]),
        _types: &'a hbty::Types,
        _files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
        _output: &mut String,
    ) -> Result<(), std::boxed::Box<dyn core::error::Error + Send + Sync + 'a>> {
        unimplemented!()
    }

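    /// Lowers a single function from hblang's sea-of-nodes IR into Cranelift
    /// IR, compiles it for the target ISA, and stashes the machine code and
    /// relocations in `self.funcs` for `assemble_reachable` to link later.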
    fn emit_body(
        &mut self,
        id: hbty::Func,
        nodes: &hbnodes::Nodes,
        tys: &hbty::Types,
        files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
    ) {
        let isa = self.module.as_ref().unwrap().isa();

        let mut lens = vec![];
        let stack_ret = build_signature(
            isa.default_call_conv(),
            tys.ins.funcs[id].sig,
            tys,
            &mut self.ctx.func.signature,
            &mut lens,
        );

        FuncBuilder {
            bl: FunctionBuilder::new(&mut self.ctx.func, &mut self.fb_ctx),
            isa,
            nodes,
            tys,
            files,
            values: &mut vec![None; nodes.len()],
            arg_lens: &lens,
            stack_ret,
        }
        .build(tys.ins.funcs[id].sig);

        self.ctx.func.name =
            cir::UserFuncName::User(cir::UserExternalName { namespace: 0, index: id.index() as _ });

        //if files[tys.ins.funcs[id].file].ident_str(tys.ins.funcs[id].name) == "reverse" {
        //    // std::eprintln!("{}", self.ctx.func.display());
        //}

        self.ctx.compile(isa, &mut self.ctrl_plane).unwrap();
        let code = self.ctx.compiled_code().unwrap();
        self.funcs.push(id, &self.ctx.func, &code.buffer);
        self.ctx.clear();
    }
}

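/// Translates an hblang signature into a Cranelift `Signature` for the given
/// calling convention, recording per-argument ABI decisions in `arg_meta`.
/// Returns `true` when the return value is passed through a caller-provided
/// memory slot rather than in registers.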
fn build_signature(
    call_conv: cc::isa::CallConv,
    sig: hbty::Sig,
    types: &hbty::Types,
    signature: &mut cir::Signature,
    arg_meta: &mut Vec<AbiMeta>,
) -> bool {
    signature.clear(call_conv);
    match call_conv {
        cc::isa::CallConv::SystemV => {
            x86_64::build_systemv_signature(sig, types, signature, arg_meta)
        }
        _ => todo!(),
    }
}

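/// How a single argument (or the return value, stored at index 0 of the meta
/// list) is passed: whether it goes through memory and how many Cranelift-level
/// arguments it occupies.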
#[derive(Clone, Copy)]
struct AbiMeta {
    trough_mem: bool,
    arg_count: usize,
}

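/// Per-function lowering state. `values` maps each hblang node to either the
/// Cranelift SSA value it produced (`Ok`) or the Cranelift block created for
/// it (`Err`).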
struct FuncBuilder<'a, 'b> {
    bl: cf::FunctionBuilder<'b>,
    isa: &'a dyn TargetIsa,
    nodes: &'a hbnodes::Nodes,
    tys: &'a hbty::Types,
    files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
    values: &'b mut [Option<Result<cir::Value, cir::Block>>],
    arg_lens: &'a [AbiMeta],
    stack_ret: bool,
}

impl FuncBuilder<'_, '_> {
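    /// Sets up the entry block, binds the incoming ABI arguments (spilling
    /// register-passed aggregates into stack slots), then recursively emits
    /// the node graph starting from `VOID`.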
    pub fn build(mut self, sig: hbty::Sig) {
        let entry = self.bl.create_block();
        self.bl.append_block_params_for_function_params(entry);
        self.bl.switch_to_block(entry);
        let mut arg_vals = &self.bl.block_params(entry).to_vec()[..];

        if self.stack_ret {
            let ret_ptr = *arg_vals.take_first().unwrap();
            self.values[hbnodes::Nid::MEM.index()] = Some(Ok(ret_ptr));
        }

        let Self { nodes, tys, .. } = self;

        let mut parama_len = self.arg_lens[1..].iter();
        let mut typs = sig.args.args();
        let mut args = nodes[hbnodes::Nid::VOID].outputs[hbnodes::ARG_START..].iter();
        while let Some(aty) = typs.next(tys) {
            let hbty::Arg::Value(ty) = aty else { continue };
            let abi_meta = parama_len.next().unwrap();
            let &arg = args.next().unwrap();
            if !abi_meta.trough_mem && ty.is_aggregate(tys) {
                let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
                    kind: cir::StackSlotKind::ExplicitSlot,
                    size: self.tys.size_of(ty),
                    align_shift: self.tys.align_of(ty).ilog2() as _,
                });
                let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
                assert!(loc.len() <= 2, "NEED handling");
                let align =
                    loc.iter().map(|&p| self.bl.func.dfg.value_type(p).bytes()).max().unwrap();
                let mut offset = 0i32;
                for &v in loc {
                    self.bl.ins().stack_store(v, slot, offset);
                    offset += align as i32;
                }
                self.values[arg.index()] =
                    Some(Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0)))
            } else {
                let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
                debug_assert_eq!(loc.len(), 1);
                self.values[arg.index()] = Some(Ok(loc[0]));
            }
        }

        self.values[hbnodes::Nid::ENTRY.index()] = Some(Err(entry));

        self.emit_node(hbnodes::Nid::VOID, hbnodes::Nid::VOID);

        self.bl.finalize();
    }

    fn value_of(&self, nid: hbnodes::Nid) -> cir::Value {
        self.values[nid.index()].unwrap_or_else(|| panic!("{:?}", self.nodes[nid])).unwrap()
    }

    fn block_of(&self, nid: hbnodes::Nid) -> cir::Block {
        self.values[nid.index()].unwrap().unwrap_err()
    }

    fn close_block(&mut self, nid: hbnodes::Nid) {
        if matches!(self.nodes[nid].kind, hbnodes::Kind::Loop) {
            return;
        }
        self.bl.seal_block(self.block_of(nid));
    }

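    /// Emits `nid` and, for control nodes, its successors. `block` names the
    /// node whose Cranelift block is currently being filled, so it can be
    /// sealed when the region ends.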
    fn emit_node(&mut self, nid: hbnodes::Nid, block: hbnodes::Nid) {
        use hbnodes::*;

        let mut args = vec![];
        if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
            let side = 1 + self.values[nid.index()].is_some() as usize;
            for &o in self.nodes[nid].outputs.iter() {
                if self.nodes[o].is_data_phi() {
                    args.push(self.value_of(self.nodes[o].inputs[side]));
                }
            }
            match (self.nodes[nid].kind, self.values[nid.index()]) {
                (Kind::Loop, Some(blck)) => {
                    self.bl.ins().jump(blck.unwrap_err(), &args);
                    self.bl.seal_block(blck.unwrap_err());
                    self.close_block(block);
                    return;
                }
                (Kind::Region, None) => {
                    let next = self.bl.create_block();
                    for &o in self.nodes[nid].outputs.iter() {
                        if self.nodes[o].is_data_phi() {
                            self.values[o.index()] = Some(Ok(self
                                .bl
                                .append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
                        }
                    }
                    self.bl.ins().jump(next, &args);
                    self.close_block(block);
                    self.values[nid.index()] = Some(Err(next));
                    return;
                }
                _ => {}
            }
        }

        let node = &self.nodes[nid];
        self.values[nid.index()] = Some(match node.kind {
            Kind::Start => {
                debug_assert_eq!(self.nodes[node.outputs[0]].kind, Kind::Entry);
                self.emit_node(node.outputs[0], block);
                return;
            }
            Kind::If => {
                let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
                let &[then, else_] = node.outputs.as_slice() else { unreachable!() };

                let then_bl = self.bl.create_block();
                let else_bl = self.bl.create_block();
                let c = self.value_of(cnd);
                self.bl.ins().brif(c, then_bl, &[], else_bl, &[]);
                self.values[then.index()] = Some(Err(then_bl));
                self.values[else_.index()] = Some(Err(else_bl));

                self.close_block(block);
                self.bl.switch_to_block(then_bl);
                self.emit_node(then, then);
                self.bl.switch_to_block(else_bl);
                self.emit_node(else_, else_);
                Err(self.block_of(block))
            }
            Kind::Loop => {
                let next = self.bl.create_block();
                for &o in self.nodes[nid].outputs.iter() {
                    if self.nodes[o].is_data_phi() {
                        self.values[o.index()] = Some(Ok(self
                            .bl
                            .append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
                    }
                }
                self.values[nid.index()] = Some(Err(next));
                self.bl.ins().jump(self.values[nid.index()].unwrap().unwrap_err(), &args);
                self.close_block(block);
                self.bl.switch_to_block(self.values[nid.index()].unwrap().unwrap_err());
                for &o in node.outputs.iter().rev() {
                    self.emit_node(o, nid);
                }
                Err(self.block_of(block))
            }
            Kind::Region => {
                self.bl.ins().jump(self.values[nid.index()].unwrap().unwrap_err(), &args);
                self.close_block(block);
                self.bl.switch_to_block(self.values[nid.index()].unwrap().unwrap_err());
                for &o in node.outputs.iter().rev() {
                    self.emit_node(o, nid);
                }
                return;
            }
            Kind::Die => {
                self.bl.ins().trap(TrapCode::unwrap_user(1));
                self.close_block(block);
                self.emit_node(node.outputs[0], block);
                Err(self.block_of(block))
            }
            Kind::Return { .. } => {
                let mut ir_args = vec![];
                if node.inputs[1] == hbnodes::Nid::VOID {
                } else {
                    let abi_meta = self.arg_lens[0];
                    let arg = node.inputs[1];
                    if !abi_meta.trough_mem && self.nodes[node.inputs[1]].ty.is_aggregate(self.tys)
                    {
                        let loc = self.bl.func.signature.returns.clone();
                        assert!(loc.len() <= 2, "NEED handling");
                        let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
                        let mut offset = 0i32;
                        let src = self.value_of(self.nodes[arg].inputs[1]);
                        debug_assert!(self.nodes[arg].kind == Kind::Load);
                        for &v in &loc {
                            ir_args.push(self.bl.ins().load(
                                v.value_type,
                                MemFlags::new(),
                                src,
                                offset,
                            ));
                            offset += align as i32;
                        }
                    } else if self.stack_ret {
                        let src = self.value_of(self.nodes[arg].inputs[1]);
                        let dest = self.value_of(Nid::MEM);
                        self.bl.emit_small_memory_copy(
                            self.isa.frontend_config(),
                            dest,
                            src,
                            self.tys.size_of(self.nodes[arg].ty) as _,
                            self.tys.align_of(self.nodes[arg].ty) as _,
                            self.tys.align_of(self.nodes[arg].ty) as _,
                            false,
                            MemFlags::new(),
                        );
                    } else {
                        self.ensure_truncated(self.bl.func.signature.returns[0], arg);
                        ir_args.push(self.value_of(arg));
                    }
                }

                self.bl.ins().return_(&ir_args);
                self.close_block(block);
                self.emit_node(node.outputs[0], block);
                Err(self.block_of(block))
            }
            Kind::Entry => {
                for &o in node.outputs.iter().rev() {
                    self.emit_node(o, nid);
                }
                return;
            }
            Kind::Then | Kind::Else => {
                for &o in node.outputs.iter().rev() {
                    self.emit_node(o, block);
                }
                Err(self.block_of(block))
            }
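            // Calls: rebuild the callee signature, import (or reuse) its
            // function reference, marshal arguments according to `AbiMeta`,
            // then spread any multi-register aggregate result into the
            // caller-provided stack slot.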
            Kind::Call { func, unreachable, args } => {
                assert_ne!(func, hbty::Func::ECA, "@eca is not supported");

                if unreachable {
                    todo!()
                } else {
                    let mut arg_lens = vec![];
                    let mut signature = cir::Signature::new(self.isa.default_call_conv());
                    let stack_ret = build_signature(
                        self.isa.default_call_conv(),
                        self.tys.ins.funcs[func].sig,
                        self.tys,
                        &mut signature,
                        &mut arg_lens,
                    );

                    let func_ref =
                        'b: {
                            let user_name_ref = self.bl.func.declare_imported_user_function(
                                cir::UserExternalName { namespace: 0, index: func.index() as _ },
                            );

                            if let Some(id) = self.bl.func.dfg.ext_funcs.keys().find(|&k| {
                                self.bl.func.dfg.ext_funcs[k].name
                                    == cir::ExternalName::user(user_name_ref)
                            }) {
                                break 'b id;
                            }

                            let signature = self.bl.func.import_signature(signature.clone());

                            self.bl.func.import_function(cir::ExtFuncData {
                                name: cir::ExternalName::user(user_name_ref),
                                signature,
                                // somehow, this works
                                colocated: true, // !self.tys.ins.funcs[func].is_import,
                            })
                        };

                    let mut ir_args = vec![];

                    if stack_ret {
                        ir_args.push(self.value_of(*node.inputs.last().unwrap()));
                    }

                    let mut params = signature.params.as_slice();
                    let mut parama_len = arg_lens[1..].iter();
                    let mut typs = args.args();
                    let mut args = node.inputs[1..].iter();
                    while let Some(aty) = typs.next(self.tys) {
                        let hbty::Arg::Value(ty) = aty else { continue };
                        let abi_meta = parama_len.next().unwrap();
                        if abi_meta.arg_count == 0 {
                            continue;
                        }
                        let &arg = args.next().unwrap();
                        if !abi_meta.trough_mem && ty.is_aggregate(self.tys) {
                            let loc = params.take(..abi_meta.arg_count).unwrap();
                            assert!(loc.len() <= 2, "NEED handling");
                            let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
                            let mut offset = 0i32;
                            let src = self.value_of(self.nodes[arg].inputs[1]);
                            debug_assert!(self.nodes[arg].kind == Kind::Load);
                            for &v in loc {
                                ir_args.push(self.bl.ins().load(
                                    v.value_type,
                                    MemFlags::new(),
                                    src,
                                    offset,
                                ));
                                offset += align as i32;
                            }
                        } else {
                            let loc = params.take(..abi_meta.arg_count).unwrap();
                            debug_assert_eq!(loc.len(), 1);
                            ir_args.push(self.value_of(arg));
                        }
                    }

                    let inst = self.bl.ins().call(func_ref, &ir_args);
                    match *self.bl.inst_results(inst) {
                        [] => {}
                        [scalar] if !self.tys.ins.funcs[func].sig.ret.is_aggregate(self.tys) => {
                            self.values[nid.index()] = Some(Ok(scalar))
                        }
                        ref loc => {
                            assert!(!stack_ret);
                            let slot = self.value_of(*node.inputs.last().unwrap());

                            assert!(loc.len() <= 2, "NEED handling");
                            let align = loc
                                .iter()
                                .map(|&p| self.bl.func.dfg.value_type(p).bytes())
                                .max()
                                .unwrap();
                            let mut offset = 0i32;
                            #[allow(clippy::unnecessary_to_owned)]
                            for v in loc.to_vec() {
                                self.bl.ins().store(MemFlags::new(), v, slot, offset);
                                offset += align as i32;
                            }
                        }
                    }
                    for &o in node.outputs.iter().rev() {
                        if self.nodes[o].inputs[0] == nid
                            || (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
                                && self.nodes[o].inputs[1] == nid)
                        {
                            self.emit_node(o, block);
                        }
                    }
                    return;
                }
            }
            Kind::CInt { value } if self.nodes[nid].ty.is_float() => {
                Ok(match self.tys.size_of(self.nodes[nid].ty) {
                    4 => self.bl.ins().f32const(f64::from_bits(value as _) as f32),
                    8 => self.bl.ins().f64const(f64::from_bits(value as _)),
                    _ => unimplemented!(),
                })
            }
            Kind::CInt { value } => Ok(self.bl.ins().iconst(
                cir::Type::int(self.tys.size_of(node.ty) as u16 * 8).unwrap_or_else(|| {
                    panic!("{}", hbty::Display::new(self.tys, self.files, node.ty),)
                }),
                value,
            )),
            Kind::BinOp { op } => {
                let &[_, lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
                let [lh, rh] = [self.value_of(lhs), self.value_of(rhs)];

                let is_int_op = node.ty.is_integer()
                    || node.ty.is_pointer()
                    || (node.ty == hbty::Id::BOOL
                        && (self.nodes[lhs].ty.is_integer()
                            || self.nodes[lhs].ty.is_pointer()
                            || self
                                .tys
                                .inner_of(self.nodes[lhs].ty)
                                .is_some_and(|v| v.is_pointer())
                            || self.nodes[lhs].ty == hbty::Id::BOOL));
                let is_float_op = node.ty.is_float()
                    || (node.ty == hbty::Id::BOOL && self.nodes[lhs].ty.is_float());

                Ok(if is_int_op {
                    let signed = node.ty.is_signed()
                        || (op.is_comparison() && self.nodes[lhs].ty.is_signed());
                    match op {
                        TokenKind::Add => self.bl.ins().iadd(lh, rh),
                        TokenKind::Sub => self.bl.ins().isub(lh, rh),
                        TokenKind::Mul => self.bl.ins().imul(lh, rh),
                        TokenKind::Shl => self.bl.ins().ishl(lh, rh),
                        TokenKind::Xor => self.bl.ins().bxor(lh, rh),
                        TokenKind::Band => self.bl.ins().band(lh, rh),
                        TokenKind::Bor => self.bl.ins().bor(lh, rh),

                        TokenKind::Div if signed => self.bl.ins().sdiv(lh, rh),
                        TokenKind::Mod if signed => self.bl.ins().srem(lh, rh),
                        TokenKind::Shr if signed => self.bl.ins().sshr(lh, rh),

                        TokenKind::Div => self.bl.ins().udiv(lh, rh),
                        TokenKind::Mod => self.bl.ins().urem(lh, rh),
                        TokenKind::Shr => self.bl.ins().ushr(lh, rh),

                        TokenKind::Lt
                        | TokenKind::Gt
                        | TokenKind::Le
                        | TokenKind::Ge
                        | TokenKind::Eq
                        | TokenKind::Ne => self.bl.ins().icmp(op.to_int_cc(signed), lh, rh),
                        op => todo!("{op}"),
                    }
                } else if is_float_op {
                    match op {
                        TokenKind::Add => self.bl.ins().fadd(lh, rh),
                        TokenKind::Sub => self.bl.ins().fsub(lh, rh),
                        TokenKind::Mul => self.bl.ins().fmul(lh, rh),
                        TokenKind::Div => self.bl.ins().fdiv(lh, rh),

                        TokenKind::Lt
                        | TokenKind::Gt
                        | TokenKind::Le
                        | TokenKind::Ge
                        | TokenKind::Eq
                        | TokenKind::Ne => self.bl.ins().fcmp(op.to_float_cc(), lh, rh),
                        op => todo!("{op}"),
                    }
                } else {
                    todo!(
                        "{} {op} {}",
                        hbty::Display::new(self.tys, self.files, node.ty),
                        hbty::Display::new(self.tys, self.files, self.nodes[lhs].ty)
                    )
                })
            }
            Kind::RetVal => Ok(self.value_of(node.inputs[0])),
            Kind::UnOp { op } => {
                let oper = self.value_of(node.inputs[1]);
                let dst = node.ty;
                let src = self
                    .tys
                    .inner_of(self.nodes[node.inputs[1]].ty)
                    .unwrap_or(self.nodes[node.inputs[1]].ty);

                let dty = dst.to_clif(self.tys);
                Ok(match op {
                    TokenKind::Sub => self.bl.ins().ineg(oper),
                    TokenKind::Not => self.bl.ins().icmp_imm(IntCC::Equal, oper, 0),
                    TokenKind::Float if dst.is_float() && src.is_unsigned() => {
                        self.bl.ins().fcvt_from_uint(dty, oper)
                    }
                    TokenKind::Float if dst.is_float() && src.is_signed() => {
                        self.bl.ins().fcvt_from_sint(dty, oper)
                    }
                    TokenKind::Number if src.is_float() && dst.is_unsigned() => {
                        self.bl.ins().fcvt_to_uint(dty, oper)
                    }
                    TokenKind::Number
                        if src.is_signed() && (dst.is_integer() || dst.is_pointer()) =>
                    {
                        self.bl.ins().sextend(dty, oper)
                    }
                    TokenKind::Number
                        if (src.is_unsigned() || src == hbty::Id::BOOL)
                            && (dst.is_integer() || dst.is_pointer()) =>
                    {
                        self.bl.ins().uextend(dty, oper)
                    }
                    TokenKind::Float if dst == hbty::Id::F64 && src.is_float() => {
                        self.bl.ins().fpromote(dty, oper)
                    }
                    TokenKind::Float if dst == hbty::Id::F32 && src.is_float() => {
                        self.bl.ins().fdemote(dty, oper)
                    }
                    _ => todo!(),
                })
            }
            Kind::Stck => {
                let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
                    kind: cir::StackSlotKind::ExplicitSlot,
                    size: self.tys.size_of(node.ty),
                    align_shift: self.tys.align_of(node.ty).ilog2() as _,
                });

                Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0))
            }
            Kind::Global { global } => {
                let glob_ref = {
                    // already deduplicated by the SoN
                    let colocated = true;
                    let user_name_ref =
                        self.bl.func.declare_imported_user_function(cir::UserExternalName {
                            namespace: 1,
                            index: global.index() as u32,
                        });
                    self.bl.func.create_global_value(cir::GlobalValueData::Symbol {
                        name: cir::ExternalName::user(user_name_ref),
                        offset: cir::immediates::Imm64::new(0),
                        colocated,
                        tls: false,
                    })
                };

                Ok(self.bl.ins().global_value(cir::types::I64, glob_ref))
            }
            Kind::Load if node.ty.is_aggregate(self.tys) => return,
            Kind::Load => {
                let ptr = self.value_of(node.inputs[1]);
                let off = self.value_of(node.inputs[2]);
                let ptr = self.bl.ins().iadd(ptr, off);
                Ok(self.bl.ins().load(node.ty.to_clif(self.tys), MemFlags::new(), ptr, 0))
            }
            Kind::Stre if node.ty.is_aggregate(self.tys) => {
                let src = self.value_of(self.nodes[node.inputs[1]].inputs[1]);
                let dest = self.value_of(node.inputs[2]);
                let off = self.value_of(node.inputs[3]);
                let dest = self.bl.ins().iadd(dest, off);
                self.bl.emit_small_memory_copy(
                    self.isa.frontend_config(),
                    dest,
                    src,
                    self.tys.size_of(node.ty) as _,
                    self.tys.align_of(node.ty) as _,
                    self.tys.align_of(node.ty) as _,
                    false,
                    MemFlags::new(),
                );
                return;
            }
            Kind::Stre => {
                let value = self.value_of(node.inputs[1]);
                let ptr = self.value_of(node.inputs[2]);
                let off = self.value_of(node.inputs[3]);
                let ptr = self.bl.ins().iadd(ptr, off);
                self.bl.ins().store(MemFlags::new(), value, ptr, 0);
                return;
            }
            Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => return,
            Kind::Assert { .. } => unreachable!(),
        });
    }

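    /// Narrows `arg` to the declared ABI return type when its Cranelift type
    /// is wider, so scalar returns match the signature.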
    fn ensure_truncated(&mut self, returns: cir::AbiParam, arg: hbnodes::Nid) {
        let value = self.value_of(arg);
        if self.bl.func.dfg.value_type(value) != returns.value_type {
            self.values[arg.index()] = Some(Ok(self.bl.ins().ireduce(returns.value_type, value)));
        }
    }
}

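/// Maps hblang comparison tokens onto Cranelift integer/float condition codes.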
trait ToCondcodes {
    fn to_int_cc(self, signed: bool) -> condcodes::IntCC;
    fn to_float_cc(self) -> condcodes::FloatCC;
}

impl ToCondcodes for TokenKind {
    fn to_int_cc(self, signed: bool) -> condcodes::IntCC {
        use condcodes::IntCC as ICC;
        match self {
            Self::Lt if signed => ICC::SignedLessThan,
            Self::Gt if signed => ICC::SignedGreaterThan,
            Self::Le if signed => ICC::SignedLessThanOrEqual,
            Self::Ge if signed => ICC::SignedGreaterThanOrEqual,

            Self::Lt => ICC::UnsignedLessThan,
            Self::Gt => ICC::UnsignedGreaterThan,
            Self::Le => ICC::UnsignedLessThanOrEqual,
            Self::Ge => ICC::UnsignedGreaterThanOrEqual,

            Self::Eq => ICC::Equal,
            Self::Ne => ICC::NotEqual,
            _ => unreachable!(),
        }
    }

    fn to_float_cc(self) -> condcodes::FloatCC {
        use condcodes::FloatCC as FCC;
        match self {
            Self::Lt => FCC::LessThan,
            Self::Gt => FCC::GreaterThan,
            Self::Le => FCC::LessThanOrEqual,
            Self::Ge => FCC::GreaterThanOrEqual,
            Self::Eq => FCC::Equal,
            Self::Ne => FCC::NotEqual,
            _ => unreachable!(),
        }
    }
}

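/// Converts a scalar hblang type into the corresponding Cranelift value type.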
trait ToClifTy {
    fn to_clif(self, cx: &hbty::Types) -> cir::Type;
}

impl ToClifTy for hbty::Id {
    fn to_clif(self, cx: &hbty::Types) -> cir::Type {
        debug_assert!(!self.is_aggregate(cx));
        if self.is_integer() | self.is_pointer() | self.is_optional() || self == hbty::Id::BOOL {
            cir::Type::int(cx.size_of(self) as u16 * 8).unwrap()
        } else if self == hbty::Id::F32 {
            cir::types::F32
        } else if self == hbty::Id::F64 {
            cir::types::F64
        } else {
            unimplemented!("{:?}", self)
        }
    }
}

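/// Object-module handle for a global, filled in once the data is declared.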
#[derive(Default)]
struct Global {
    module_id: Option<cm::DataId>,
}

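/// Per-function bookkeeping; the ranges index into the flat buffers held by
/// [`Functions`].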
#[derive(Default)]
struct FuncHeaders {
    module_id: Option<cm::FuncId>,
    alignment: u32,
    code: Range<u32>,
    relocs: Range<u32>,
    external_names: Range<u32>,
}

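/// Compiled machine code for all emitted functions, stored as flat buffers
/// indexed by the ranges recorded in each [`FuncHeaders`].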
#[derive(Default)]
struct Functions {
    headers: EntVec<hbty::Func, FuncHeaders>,
    code: Vec<u8>,
    relocs: Vec<FinalizedMachReloc>,
    external_names: Vec<UserExternalName>,
}

impl Functions {
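    /// Records the finalized machine code, relocations, and referenced
    /// external names for `id`, keeping only index ranges in the header.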
    fn push(&mut self, id: hbty::Func, func: &cir::Function, code: &MachBufferFinalized<Final>) {
        self.headers.shadow(id.index() + 1);
        self.headers[id] = FuncHeaders {
            module_id: None,
            alignment: code.alignment,
            code: self.code.len() as u32..self.code.len() as u32 + code.data().len() as u32,
            relocs: self.relocs.len() as u32..self.relocs.len() as u32 + code.relocs().len() as u32,
            external_names: self.external_names.len() as u32
                ..self.external_names.len() as u32 + func.params.user_named_funcs().len() as u32,
        };
        self.code.extend(code.data());
        self.relocs.extend(code.relocs().iter().cloned());
        self.external_names.extend(func.params.user_named_funcs().values().cloned());
    }
}

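/// Scratch state reused by `assemble_reachable`: a worklist of reachable
/// items plus the functions and globals that still need their bodies defined.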
#[derive(Default)]
struct Assembler {
    name: String,
    frontier: Vec<hbty::Id>,
    globals: Vec<hbty::Global>,
    funcs: Vec<hbty::Func>,
}

#[derive(Debug)]
pub enum BackendCreationError {
    UnsupportedTriplet(LookupError),
    InvalidFlags(CodegenError),
    UnsupportedModuleConfig(ModuleError),
    InvalidFlag { key: String, value: String, err: SetError },
}

impl Display for BackendCreationError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            BackendCreationError::UnsupportedTriplet(err) => {
                write!(f, "Unsupported triplet: {}", err)
            }
            BackendCreationError::InvalidFlags(err) => {
                write!(f, "Invalid flags: {}", err)
            }
            BackendCreationError::UnsupportedModuleConfig(err) => {
                write!(f, "Unsupported module configuration: {}", err)
            }
            BackendCreationError::InvalidFlag { key, value, err } => {
                write!(
                    f,
                    "Problem setting '{key}' to '{value}': {err}\navailable flags: {}",
                    cc::settings::Flags::new(cc::settings::builder())
                )
            }
        }
    }
}

impl core::error::Error for BackendCreationError {}

impl From<LookupError> for BackendCreationError {
    fn from(value: LookupError) -> Self {
        Self::UnsupportedTriplet(value)
    }
}

impl From<CodegenError> for BackendCreationError {
    fn from(value: CodegenError) -> Self {
        Self::InvalidFlags(value)
    }
}

impl From<ModuleError> for BackendCreationError {
    fn from(value: ModuleError) -> Self {
        Self::UnsupportedModuleConfig(value)
    }
}