add new ableos path resolver, separate platform-independent code

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
Jakub Doka 2024-11-30 18:57:29 +01:00
parent d368ac023b
commit 19aca050ed
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
10 changed files with 306 additions and 221 deletions

@@ -2,3 +2,4 @@
--fmt-stdout - don't write the formatted file but print it
--dump-asm - output assembly instead of raw code (the assembly is more for debugging the compiler)
--threads <1...> - number of extra threads compiler can use [default: 0]
--path-resolver <name> - choose between builtin path resolvers, options are: ableos
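
A minimal sketch of how the flag is consumed, mirroring the new src/main.rs further down in this commit (the wrapper function and argument list here are illustrative; ABLEOS_PATH_RESOLVER is the static defined in that file):

fn compile(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
    // Resolvers are registered as (name, resolver) pairs; --path-resolver picks one by name.
    // An unknown name makes from_args list the valid options and return an error.
    let args = ["hbc", "main.hb", "--path-resolver", "ableos"];
    let resolvers = &[("ableos", ABLEOS_PATH_RESOLVER)];
    let opts = hblang::Options::from_args(&args, out, resolvers)?;
    hblang::run_compiler("main.hb", opts, out, warnings)
}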

@@ -41,13 +41,16 @@ pub struct Options<'a> {
pub fmt: bool,
pub fmt_stdout: bool,
pub dump_asm: bool,
pub in_house_regalloc: bool,
pub extra_threads: usize,
pub resolver: Option<PathResolver<'a>>,
}
impl Options<'static> {
pub fn from_args(args: &[&str], out: &mut Vec<u8>) -> std::io::Result<Self> {
impl<'a> Options<'a> {
pub fn from_args(
args: &[&str],
out: &mut Vec<u8>,
resolvers: &'a [(&str, PathResolver)],
) -> std::io::Result<Self> {
if args.contains(&"--help") || args.contains(&"-h") {
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
writeln!(out, include_str!("../command-help.txt"))?;
@@ -58,7 +61,6 @@ impl Options<'static> {
fmt: args.contains(&"--fmt"),
fmt_stdout: args.contains(&"--fmt-stdout"),
dump_asm: args.contains(&"--dump-asm"),
in_house_regalloc: args.contains(&"--in-house-regalloc"),
extra_threads: args
.iter()
.position(|&a| a == "--threads")
@@ -72,7 +74,27 @@ impl Options<'static> {
.transpose()?
.map_or(1, NonZeroUsize::get)
- 1,
..Default::default()
resolver: args
.iter()
.position(|&a| a == "--path-resolver")
.map(|i| {
resolvers.iter().find(|&&(n, _)| args[i + 1] == n).map(|&(_, r)| r).ok_or_else(
|| {
writeln!(
out,
"--path-resolver can only be one of: {}",
resolvers
.iter()
.map(|&(n, _)| n)
.intersperse(", ")
.collect::<String>()
)
.err()
.unwrap_or(std::io::ErrorKind::Other.into())
},
)
})
.transpose()?,
})
}
}
@@ -107,7 +129,6 @@ pub fn run_compiler(
write!(out, "{}", &parsed.ast[0])?;
} else {
let mut backend = HbvmBackend::default();
backend.use_in_house_regalloc = options.in_house_regalloc;
let mut ctx = crate::son::CodegenCtx::default();
*ctx.parser.errors.get_mut() = parsed.errors;

@@ -100,15 +100,6 @@ mod debug {
}
}
pub mod reg {
pub const STACK_PTR: Reg = 254;
pub const ZERO: Reg = 0;
pub const RET: Reg = 1;
pub const RET_ADDR: Reg = 31;
pub type Reg = u8;
}
mod ctx_map {
use core::hash::BuildHasher;
@@ -841,12 +832,6 @@ enum CompState {
Compiled,
}
#[derive(Clone, Copy)]
struct TypedReloc {
target: ty::Id,
reloc: Reloc,
}
#[derive(Clone, Default)]
struct Global {
file: Module,
@@ -863,33 +848,6 @@ pub struct Const {
parent: ty::Id,
}
// TODO: make into bit struct (width: u2, sub_offset: u3, offset: u27)
#[derive(Clone, Copy, Debug)]
struct Reloc {
offset: Offset,
sub_offset: u8,
width: u8,
}
impl Reloc {
fn new(offset: usize, sub_offset: u8, width: u8) -> Self {
Self { offset: offset as u32, sub_offset, width }
}
fn apply_jump(mut self, code: &mut [u8], to: u32, from: u32) -> i64 {
self.offset += from;
let offset = to as i64 - self.offset as i64;
self.write_offset(code, offset);
offset
}
fn write_offset(&self, code: &mut [u8], offset: i64) {
let bytes = offset.to_ne_bytes();
let slice = &mut code[self.offset as usize + self.sub_offset as usize..];
slice[..self.width as usize].copy_from_slice(&bytes[..self.width as usize]);
}
}
struct EnumField {
name: Ident,
}
@@ -941,26 +899,6 @@ impl Array {
}
}
#[derive(Clone, Copy)]
enum PLoc {
Reg(u8, u16),
WideReg(u8, u16),
Ref(u8, u32),
}
struct ParamAlloc(Range<u8>);
impl ParamAlloc {
pub fn next(&mut self, ty: ty::Id, tys: &Types) -> Option<PLoc> {
Some(match tys.size_of(ty) {
0 => return None,
size @ 1..=8 => PLoc::Reg(self.0.next().unwrap(), size as _),
size @ 9..=16 => PLoc::WideReg(self.0.next_chunk::<2>().unwrap()[0], size as _),
size @ 17.. => PLoc::Ref(self.0.next().unwrap(), size),
})
}
}
impl ctx_map::CtxEntry for Ident {
type Ctx = str;
type Key<'a> = &'a str;
@@ -1109,13 +1047,6 @@ impl Types {
start..end
}
fn parama(&self, ret: impl Into<ty::Id>) -> (Option<PLoc>, ParamAlloc) {
let mut iter = ParamAlloc(1..12);
let ret = iter.next(ret.into(), self);
iter.0.start += ret.is_none() as u8;
(ret, iter)
}
fn make_opt(&mut self, base: ty::Id) -> ty::Id {
self.make_generic_ty(Opt { base }, |ins| &mut ins.opts, |e| SymKey::Optional(e))
}

@@ -1,12 +1,43 @@
#[cfg(feature = "std")]
fn main() {
use std::io::Write;
use std::{
io::Write,
path::{Path, PathBuf},
};
static ABLEOS_PATH_RESOLVER: hblang::PathResolver =
&|mut path: &str, mut from: &str, tmp: &mut PathBuf| {
tmp.clear();
path = match path {
"stn" => {
from = "";
"./sysdata/libraries/stn/src/lib.hb"
}
_ => path,
};
match path.split_once(':') {
Some(("lib", p)) => tmp.extend(["./sysdata/libraries", p, "src/lib.hb"]),
Some(("stn", p)) => {
tmp.extend(["./sysdata/libraries/stn/src", &(p.to_owned() + ".hb")])
}
Some(("sysdata", p)) => tmp.extend(["./sysdata", p]),
None => match Path::new(from).parent() {
Some(parent) => tmp.extend([parent, Path::new(path)]),
None => tmp.push(path),
},
_ => panic!("path: '{path}' is invalid: unexpected ':'"),
};
tmp.canonicalize()
.map_err(|source| hblang::CantLoadFile { path: std::mem::take(tmp), source })
};
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
let args = std::env::args().collect::<Vec<_>>();
let args = args.iter().map(String::as_str).collect::<Vec<_>>();
let opts = hblang::Options::from_args(&args, out)?;
let resolvers = &[("ableos", ABLEOS_PATH_RESOLVER)];
let opts = hblang::Options::from_args(&args, out, resolvers)?;
let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
hblang::run_compiler(file, opts, out, warnings)
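
For reference, a sketch of the targets the ableos resolver produces before canonicalization (library and module names other than stn are illustrative, not taken from this commit):

// "stn"            -> ./sysdata/libraries/stn/src/lib.hb
// "lib:<name>"     -> ./sysdata/libraries/<name>/src/lib.hb
// "stn:<module>"   -> ./sysdata/libraries/stn/src/<module>.hb
// "sysdata:<rest>" -> ./sysdata/<rest>
// plain path       -> joined onto the importing file's parent directory (or used as-is if it has none)
// any other "<prefix>:<rest>" -> panics with "path: '...' is invalid: unexpected ':'"
fn lib_target(name: &str) -> std::path::PathBuf {
    // The same join the resolver performs for "lib:" imports.
    let mut tmp = std::path::PathBuf::new();
    tmp.extend(["./sysdata/libraries", name, "src/lib.hb"]);
    tmp
}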

@@ -297,6 +297,120 @@ impl Nodes {
}
}
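// Reschedules the nodes owned by each basic block: every CFG node that does not end a
// block gets its `outputs` reordered by `reschedule_block` below.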
fn schedule_inside_blocks(
&mut self,
cfg_nodes: &mut Vec<Nid>,
buf: &mut Vec<Nid>,
seen: &mut BitSet,
) {
debug_assert!(cfg_nodes.is_empty());
debug_assert!(buf.is_empty());
cfg_nodes.extend(
self.iter()
// skip VOID and NEVER
.skip(2)
.filter(|(_, n)| n.kind.is_cfg() && !n.kind.ends_basic_block())
.map(|(n, _)| n),
);
for &block in &*cfg_nodes {
seen.clear(self.values.len());
let mut outputs = mem::take(&mut self[block].outputs);
self.reschedule_block(block, &mut outputs, buf, seen);
self[block].outputs = outputs;
}
cfg_nodes.clear();
}
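// Roughly: moves the single CFG successor to the front and orders the remaining nodes so
// that each appears before its block-local inputs; the backend walks `outputs` in reverse,
// so definitions are emitted before their uses.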
fn reschedule_block(
&self,
from: Nid,
outputs: &mut [Nid],
buf: &mut Vec<Nid>,
seen: &mut BitSet,
) {
debug_assert!(buf.is_empty());
// NOTE: this code is horrible
let fromc = Some(&from);
let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap();
outputs.swap(cfg_idx, 0);
for &o in outputs.iter() {
if (!self.is_cfg(o)
&& self[o].outputs.iter().any(|&oi| {
self[oi].kind != Kind::Phi && self[oi].inputs.first() == fromc && !seen.get(oi)
}))
|| !seen.set(o)
{
continue;
}
let mut cursor = buf.len();
for &o in outputs.iter().filter(|&&n| n == o) {
buf.push(o);
}
while let Some(&n) = buf.get(cursor) {
for &i in &self[n].inputs[1..] {
if fromc == self[i].inputs.first()
&& self[i].outputs.iter().all(|&o| {
self[o].kind == Kind::Phi
|| self[o].inputs.first() != fromc
|| seen.get(o)
})
&& seen.set(i)
{
for &o in outputs.iter().filter(|&&n| n == i) {
buf.push(o);
}
}
}
cursor += 1;
}
}
debug_assert_eq!(
outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(),
vec![],
"{:?} {from:?} {:?}",
outputs
.iter()
.filter(|&&n| !seen.get(n))
.copied()
.map(|n| (n, &self[n]))
.collect::<Vec<_>>(),
self[from]
);
let bf = &buf;
debug_assert_eq!(
bf.iter()
.enumerate()
.filter(|(_, &b)| !self[b].kind.is_pinned())
.flat_map(|(i, &b)| self[b]
.inputs
.iter()
.filter(|&&b| !self[b].kind.is_pinned())
.filter_map(move |&inp| bf
.iter()
.position(|&n| inp == n)
.filter(|&j| i > j)
.map(|j| (bf[i], bf[j]))))
.collect::<Vec<_>>(),
vec![],
"{:?}",
bf
);
debug_assert!(self.is_cfg(bf[0]) || self[bf[0]].kind == Kind::Phi, "{:?}", self[bf[0]]);
if outputs.len() != buf.len() {
panic!("{:?} {:?}", outputs, buf);
}
outputs.copy_from_slice(buf);
buf.clear();
}
fn push_down(
&self,
node: Nid,
@@ -623,6 +737,9 @@ impl Nodes {
}
scratch.clear();
visited.clear(self.values.len());
self.schedule_inside_blocks(bind_buf, scratch, visited);
}
fn clear(&mut self) {
@@ -2410,11 +2527,6 @@ impl ItemCtx {
}
}
fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
let value = value.to_ne_bytes();
doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}
#[derive(Default, Debug)]
struct Ctx {
ty: Option<ty::Id>,

@@ -3,19 +3,66 @@ use {
crate::{
lexer::TokenKind,
parser,
reg::{self, Reg},
son::{debug_assert_matches, write_reloc, Kind, MEM},
son::{debug_assert_matches, Kind, MEM},
ty::{self, Arg, Loc, Module},
utils::{BitSet, Ent, EntVec, Vc},
Offset, PLoc, Reloc, Sig, Size, TypedReloc, Types,
utils::{Ent, EntVec},
Offset, Sig, Size, Types,
},
alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
core::mem,
core::{mem, ops::Range},
hbbytecode::{self as instrs, *},
reg::Reg,
};
mod regalloc;
mod reg {
pub const STACK_PTR: Reg = 254;
pub const ZERO: Reg = 0;
pub const RET: Reg = 1;
pub const RET_ADDR: Reg = 31;
pub type Reg = u8;
}
fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
let value = value.to_ne_bytes();
doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}
#[derive(Clone, Copy)]
struct TypedReloc {
target: ty::Id,
reloc: Reloc,
}
// TODO: make into bit struct (width: u2, sub_offset: u3, offset: u27)
#[derive(Clone, Copy, Debug)]
struct Reloc {
offset: Offset,
sub_offset: u8,
width: u8,
}
impl Reloc {
fn new(offset: usize, sub_offset: u8, width: u8) -> Self {
Self { offset: offset as u32, sub_offset, width }
}
fn apply_jump(mut self, code: &mut [u8], to: u32, from: u32) -> i64 {
self.offset += from;
let offset = to as i64 - self.offset as i64;
self.write_offset(code, offset);
offset
}
fn write_offset(&self, code: &mut [u8], offset: i64) {
let bytes = offset.to_ne_bytes();
let slice = &mut code[self.offset as usize + self.sub_offset as usize..];
slice[..self.width as usize].copy_from_slice(&bytes[..self.width as usize]);
}
}
struct FuncDt {
offset: Offset,
// TODO: change to indices into common vec
@@ -48,8 +95,6 @@ struct Assembler {
#[derive(Default)]
pub struct HbvmBackend {
pub use_in_house_regalloc: bool,
funcs: EntVec<ty::Func, FuncDt>,
globals: EntVec<ty::Global, GlobalDt>,
asm: Assembler,
@@ -355,89 +400,6 @@ impl Nodes {
}
}
fn reschedule_block(&self, from: Nid, outputs: &mut Vc) {
// NOTE: this code is horrible
let fromc = Some(&from);
let mut buf = Vec::with_capacity(outputs.len());
let mut seen = BitSet::default();
seen.clear(self.values.len());
let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap();
outputs.swap(cfg_idx, 0);
for &o in outputs.iter() {
if (!self.is_cfg(o)
&& self[o].outputs.iter().any(|&oi| {
self[oi].kind != Kind::Phi && self[oi].inputs.first() == fromc && !seen.get(oi)
}))
|| !seen.set(o)
{
continue;
}
let mut cursor = buf.len();
for &o in outputs.iter().filter(|&&n| n == o) {
buf.push(o);
}
while let Some(&n) = buf.get(cursor) {
for &i in &self[n].inputs[1..] {
if fromc == self[i].inputs.first()
&& self[i].outputs.iter().all(|&o| {
self[o].kind == Kind::Phi
|| self[o].inputs.first() != fromc
|| seen.get(o)
})
&& seen.set(i)
{
for &o in outputs.iter().filter(|&&n| n == i) {
buf.push(o);
}
}
}
cursor += 1;
}
}
debug_assert_eq!(
outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(),
vec![],
"{:?} {from:?} {:?}",
outputs
.iter()
.filter(|&&n| !seen.get(n))
.copied()
.map(|n| (n, &self[n]))
.collect::<Vec<_>>(),
self[from]
);
let bf = &buf;
debug_assert_eq!(
bf.iter()
.enumerate()
.filter(|(_, &b)| !self[b].kind.is_pinned())
.flat_map(|(i, &b)| self[b]
.inputs
.iter()
.filter(|&&b| !self[b].kind.is_pinned())
.filter_map(move |&inp| bf
.iter()
.position(|&n| inp == n)
.filter(|&j| i > j)
.map(|j| (bf[i], bf[j]))))
.collect::<Vec<_>>(),
vec![],
"{:?}",
bf
);
debug_assert!(self.is_cfg(bf[0]) || self[bf[0]].kind == Kind::Phi, "{:?}", self[bf[0]]);
if outputs.len() != buf.len() {
panic!("{:?} {:?}", outputs, buf);
}
outputs.copy_from_slice(&buf);
}
fn is_never_used(&self, nid: Nid, tys: &Types) -> bool {
let node = &self[nid];
match node.kind {
@@ -919,6 +881,35 @@ impl TokenKind {
}
}
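// Where a value lives when passed to or returned from a function, as assigned by
// ParamAlloc::next below: one register (1..=8 bytes), a register pair (9..=16 bytes),
// or by reference (anything larger), each paired with the value's byte size.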
#[derive(Clone, Copy)]
enum PLoc {
Reg(Reg, u16),
WideReg(Reg, u16),
Ref(Reg, u32),
}
struct ParamAlloc(Range<Reg>);
impl ParamAlloc {
pub fn next(&mut self, ty: ty::Id, tys: &Types) -> Option<PLoc> {
Some(match tys.size_of(ty) {
0 => return None,
size @ 1..=8 => PLoc::Reg(self.0.next().unwrap(), size as _),
size @ 9..=16 => PLoc::WideReg(self.0.next_chunk::<2>().unwrap()[0], size as _),
size @ 17.. => PLoc::Ref(self.0.next().unwrap(), size),
})
}
}
impl Types {
fn parama(&self, ret: ty::Id) -> (Option<PLoc>, ParamAlloc) {
let mut iter = ParamAlloc(1..12);
let ret = iter.next(ret, self);
iter.0.start += ret.is_none() as u8;
(ret, iter)
}
}
type EncodedInstr = (usize, [u8; instrs::MAX_SIZE]);
fn emit(out: &mut Vec<u8>, (len, instr): EncodedInstr) {
out.extend_from_slice(&instr[..len]);

@@ -1,12 +1,14 @@
use {
super::{HbvmBackend, Nid, Nodes},
crate::{
parser,
reg::{self, Reg},
son::{debug_assert_matches, Kind, ARG_START, MEM, VOID},
son::{
debug_assert_matches,
hbvm::{reg, reg::Reg, HbvmBackend, Nid, Nodes, PLoc},
Kind, ARG_START, MEM, VOID,
},
ty::{self, Arg, Loc},
utils::BitSet,
PLoc, Sig, Types,
Sig, Types,
},
alloc::{borrow::ToOwned, vec::Vec},
core::{mem, ops::Range},
@@ -415,7 +417,7 @@ impl<'a> Function<'a> {
return;
}
let mut node = self.nodes[nid].clone();
let node = &self.nodes[nid];
match node.kind {
Kind::Start => {
debug_assert_matches!(self.nodes[node.outputs[0]].kind, Kind::Entry);
@@ -441,8 +443,7 @@ impl<'a> Function<'a> {
Kind::Region | Kind::Loop => {
self.close_block(nid);
self.add_block(nid);
self.nodes.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
for &o in node.outputs.iter().rev() {
self.emit_node(o);
}
}
@@ -469,15 +470,13 @@ impl<'a> Function<'a> {
}
}
self.nodes.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
for &o in node.outputs.iter().rev() {
self.emit_node(o);
}
}
Kind::Then | Kind::Else => {
self.add_block(nid);
self.nodes.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
for &o in node.outputs.iter().rev() {
self.emit_node(o);
}
}
@@ -486,8 +485,7 @@ impl<'a> Function<'a> {
self.add_instr(nid);
self.nodes.reschedule_block(nid, &mut node.outputs);
for o in node.outputs.into_iter().rev() {
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)

@@ -15,16 +15,16 @@ main:
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
str_len:
CP r15, r2
CP r14, r0
CP r13, r14
2: LD r16, r15, 0a, 1h
CP r13, r2
CP r15, r0
CP r14, r15
2: LD r16, r13, 0a, 1h
ANDI r16, r16, 255d
JNE r16, r14, :0
CP r1, r13
JNE r16, r15, :0
CP r1, r14
JMP :1
0: ADDI64 r15, r15, 1d
ADDI64 r13, r13, 1d
0: ADDI64 r13, r13, 1d
ADDI64 r14, r14, 1d
JMP :2
1: JALA r0, r31, 0a
code size: 216

@@ -11,29 +11,29 @@ main:
ADDI64 r254, r254, -56d
ST r31, r254, 0a, 56h
JAL r31, r0, :check_platform
CP r33, r0
CP r35, r0
LI64 r36, 30d
LI64 r37, 100d
CP r35, r33
CP r34, r33
CP r32, r33
5: JLTU r32, r36, :0
ADDI64 r34, r34, 1d
CP r2, r33
CP r3, r34
CP r34, r35
CP r33, r35
CP r32, r35
5: JLTU r34, r36, :0
ADDI64 r32, r32, 1d
CP r2, r35
CP r3, r32
CP r4, r36
JAL r31, r0, :set_pixel
CP r32, r1
JEQ r32, r35, :1
CP r1, r33
JMP :2
1: JNE r34, r37, :3
CP r34, r1
JEQ r34, r33, :1
CP r1, r35
JMP :2
3: CP r32, r33
1: JNE r32, r37, :3
CP r1, r33
JMP :2
3: CP r34, r35
JMP :4
0: ADDI64 r35, r35, 1d
ADDI64 r32, r32, 1d
0: ADDI64 r33, r33, 1d
ADDI64 r34, r34, 1d
4: JMP :5
2: LD r31, r254, 0a, 56h
ADDI64 r254, r254, 56d

@@ -10,21 +10,21 @@ main:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
sqrt:
CP r13, r2
CP r14, r2
LI64 r16, 15d
LI64 r15, 32768d
CP r17, r0
CP r14, r17
CP r13, r17
3: JNE r15, r17, :0
CP r1, r14
CP r1, r13
JMP :1
0: SLUI64 r18, r14, 1b
0: SLUI64 r18, r13, 1b
ADDI64 r16, r16, -1d
ADD64 r18, r18, r15
SLU64 r18, r18, r16
JLTU r13, r18, :2
SUB64 r13, r13, r18
ADD64 r14, r15, r14
JLTU r14, r18, :2
SUB64 r14, r14, r18
ADD64 r13, r15, r13
JMP :2
2: SRUI64 r15, r15, 1b
JMP :3