Compare commits


34 commits
trunk ... trunk

Author SHA1 Message Date
Jakub Doka 3491814b4f
i am tired
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 21:50:03 +01:00
Jakub Doka ee434e6135
more forgotten stuff
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 21:43:53 +01:00
Jakub Doka 9afe191bca
fixed a missing feature
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 21:41:28 +01:00
Jakub Doka 8ededb8612
adding standard instruction logging utility
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 21:33:15 +01:00
Jakub Doka 9c4b84ce33
fixing the precedence regarding slice ranges
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 20:43:49 +01:00
Jakub Doka 5909837015
making sure sliced pointer is loaded
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 18:53:52 +01:00
Jakub Doka 9f67b22aa2
maybe now the bug is fixed
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 17:01:36 +01:00
Jakub Doka 939d0807fb
fixing slice slicing
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-22 16:18:44 +01:00
Jakub Doka 888b38ad4c
forgot to transition @nameof to slices
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-21 23:55:02 +01:00
Jakub Doka 5275a7e0fd
adding slices
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-21 23:44:33 +01:00
Jakub Doka 418fd0039e
making the pointered arrays work properly
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-21 17:36:42 +01:00
Jakub Doka d220823d78
fixing the bug in previous commit
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-21 17:14:33 +01:00
Jakub Doka af19f4e30d
making the identifiers accessible if they are captured
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-21 14:21:58 +01:00
Jakub Doka 1621d93e86
adding more stuff to the blog
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-20 22:54:46 +01:00
Jakub Doka 4b3b6af70e
adding a handler for tuples with a known type
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-20 11:59:21 +01:00
Jakub Doka f59c0c1092
some syntax changes
mainly added the explicit type for declarations

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-20 11:32:18 +01:00
Jakub Doka 8ad58ee6b6
changing the array syntax to be distinct from tuples
the arrays can be declared in a more natural way
the type of the element can also be inferred from the first element

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-20 10:17:34 +01:00
Jakub Doka 6e8eb059f6
adding tuples
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-19 23:08:36 +01:00
Jakub Doka 969ea57e3f
optimizing the bitset used in register allocation
also fixing an enum bug

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-19 19:43:30 +01:00
Jakub Doka cfd3eac0a8
making the instruction scheduling smarter
the instructions that are only depended on by phis are pushed to the end of
the block, which usually saves copy instructions

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-19 11:00:19 +01:00
Jakub Doka a8aba7e7c2
making the Call node less special
return value is now a separate node pinned to the call

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-19 10:48:05 +01:00
Jakub Doka f05c61a99e
adding @ChildOf directive
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-18 00:06:57 +01:00
Jakub Doka e769fa8dba
removing error for needless @as temporarily
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 22:29:16 +01:00
Jakub Doka b3f858f64b
adding @error directive
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 21:35:47 +01:00
Jakub Doka 1584ec7563
adding @Any directive
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 21:11:02 +01:00
Jakub Doka 6085177982
fixed the unreachable functions deleting branches
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 20:53:13 +01:00
Jakub Doka 47014c6164
lifting the restriction for inlining to allow normal functions as well
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 19:32:30 +01:00
Jakub Doka 3702a99d03
fixing another incorrect file reporting
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 19:30:42 +01:00
Jakub Doka 248bdf003a
making the else branch have lower priority
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 19:10:56 +01:00
Jakub Doka d3f3fe98e3
propagating unreachable for functions returning never type
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 19:08:53 +01:00
Jakub Doka 14cf5efaa5
handling comptime known match
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 19:01:01 +01:00
Jakub Doka 95496116b0
making @len work on strings
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 18:30:19 +01:00
Jakub Doka 86f7d70747
adding default values to struct fields and @kindof directive
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 17:51:14 +01:00
Jakub Doka 0516ce68f4
adding @nameof
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
2024-12-17 16:46:43 +01:00
49 changed files with 2052 additions and 881 deletions

View file

@ -5,6 +5,6 @@ edition = "2018"
[features]
default = ["disasm"]
std = []
disasm = ["std"]
disasm = ["alloc"]
alloc = []

View file

@ -98,6 +98,27 @@ fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>>
writeln!(generated, " {name} = {id},")?;
}
writeln!(generated, "}}")?;
writeln!(generated, "impl {instr} {{")?;
writeln!(generated, " pub fn size(self) -> usize {{")?;
writeln!(generated, " match self {{")?;
let mut instrs = instructions().collect::<Vec<_>>();
instrs.sort_unstable_by_key(|&[.., ty, _]| iter_args(ty).map(arg_to_width).sum::<usize>());
for group in instrs.chunk_by(|[.., a, _], [.., b, _]| {
iter_args(a).map(arg_to_width).sum::<usize>()
== iter_args(b).map(arg_to_width).sum::<usize>()
}) {
let ty = group[0][2];
for &[_, name, ..] in group {
writeln!(generated, " | {instr}::{name}")?;
}
generated.pop();
let size = iter_args(ty).map(arg_to_width).sum::<usize>() + 1;
writeln!(generated, " => {size},")?;
}
writeln!(generated, " }}")?;
writeln!(generated, " }}")?;
writeln!(generated, "}}")?;
}
'_arg_kind: {

View file

@ -254,8 +254,7 @@ pub fn disasm<'a>(
|| global_offset > off + len
|| prev
.get(global_offset as usize)
.map_or(true, |&b| instr_from_byte(b).is_err())
|| prev[global_offset as usize] == 0;
.is_none_or(|&b| instr_from_byte(b).is_err());
has_oob |= local_has_oob;
let label = labels.get(&global_offset).unwrap();
if local_has_oob {

View file

@ -1,4 +1,4 @@
# The journey to optimizing compiler
# The journey to an optimizing compiler
It's been years of continuously trying to make a compiler to implement the language of my dreams. The problem, though, was that I wanted something similar to Rust, and if you did not know, `rustc` passed the one-million-lines-of-code mark some time ago, so implementing such a language would take me years if not decades, but I still tried it.
@ -28,4 +28,34 @@ It took around 4 months to reimplement everything make make the optimal code loo
## How my understanding of optimizations changed
I need to admit, before writing a single-pass compiler and later upgrading it to an optimizing one, I took optimizations for some kind of magic that makes code faster, and honestly believed they are optional and that most of the hard work is done in the process of translating readable text into machine code. That is almost true, minus the readable part. If you want the code you write to perform well with a compiler that translates your code from text to instructions exactly as it is written, you will be forced to do, by hand in your code, everything modern optimizers do. TODO...
### Optimizations allow us to scale software
I need to admit, before writing a single-pass compiler and later upgrading it to an optimizing one, I thought optimizations only affect the quality of the final assembly emitted by the compiler. It never occurred to me that what optimizations actually do is reduce the impact of how you decide to write the code. In a single-pass compiler (with zero optimizations), the machine code reflects (see the sketch after this list):
- the order of operations as written in the code
- whether a value was stored in intermediate locations
- the exact structure of the control flow and at which points the operations are placed
- how many times something is recomputed
- operations that only exist to convey intent to the reader of the source code
- and more I can't think of...
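To make that concrete, here is a small illustrative sketch (in Rust, purely for illustration; the function names are made up for this example, they are not from the repo). An optimizing compiler emits essentially the same machine code for both functions, while a single-pass compiler faithfully preserves the differences: the bound recomputed on every iteration, the extra named temporary, and the hand-hoisted invariant.

```rust
fn sum_flat(data: &[u32], rows: usize, cols: usize) -> u32 {
    let len = rows * cols; // bound computed once, by hand
    let mut acc = 0;
    for i in 0..len {
        acc += data[i];
    }
    acc
}

fn sum_naive(data: &[u32], rows: usize, cols: usize) -> u32 {
    let mut acc = 0;
    let mut i = 0;
    while i < rows * cols { // bound recomputed every iteration
        let tmp = data[i]; // intermediate location that only conveys intent
        acc += tmp;
        i += 1;
    }
    acc
}
```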
If you took some code you wrote and then modified it to obscure these aspects (relative to the original code), you would be doing a subset of what an optimizing compiler does. Of course, a good compiler tries hard to improve the metrics it's optimizing for; it would (a rough before/after sketch follows this list):
- reorder operations to allow the CPU to parallelize them
- remove needless stores, or store values directly to places you can't express in code
- pull operations out of loops and into branches (if it can)
- find all common sub-expressions and compute them only once
- fold constants as much as possible and use obscure tricks to replace slow instructions if any of the operands are constant
- and more...
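A rough before/after sketch of a couple of these rewrites, written out by hand as source code (what any particular optimizer actually emits will differ; this only shows the effect of hoisting a loop-invariant expression and folding a multiply by a power of two into a shift):

```rust
// What you write:
fn scale(values: &mut [i64], factor: i64) {
    for i in 0..values.len() {
        // `factor * 8` is loop-invariant and a multiply by a power of two.
        values[i] = values[i] * (factor * 8);
    }
}

// Roughly what the optimizer turns it into, expressed back as source:
fn scale_optimized(values: &mut [i64], factor: i64) {
    let k = factor << 3; // invariant hoisted out of the loop, `* 8` folded into a shift
    for v in values.iter_mut() {
        *v *= k; // indexing and its bounds checks are gone
    }
}
```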
In the end, compiler optimizations try to reduce the correlation between how the code happens to be written and how well it performs, which is extremely important when you want humans to be able to read the code.
### Optimizing compilers know more than you
Optimizing code is a search problem: an optimizer searches the code for patterns that can be rewritten into something more practical for the computer, while preserving the observable behavior of the program. This means it needs enough context about the code to not make a mistake. In fact, the optimizer has so much context that it is able to determine your code is useless. But wait, didn't you write the code because you needed it to do something? Maybe your intention was to break out of the loop once you were done, but the optimizer looked at the code and said, "great, we are so lucky that this integer is always small enough to miss this check by one, DELETE", and then it went, "jackpot, since this loop is now infinite, we don't need the code after it, DELETE". Notice that the optimizer is eager to delete dead code; it did not ask you, "Brah, why did you place all your code after an infinite loop?". This is just an example; there are many more cases where modern optimizers delete all your code because they proved, without running it, that it does something invalid.
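Here is a hedged Rust sketch of the shape of that loop story (the names are invented for the example, and `get_unchecked` is used to opt out of bounds checks, since safe indexing would panic instead of invoking undefined behavior):

```rust
const LEN: usize = 64;

// Intent: find the first zero byte, or fall through after scanning the table.
// The off-by-one (`<=` instead of `<`) makes the final iteration read out of
// bounds. That read is undefined behavior, so the optimizer may assume the
// loop can only ever exit through the early return, which makes everything
// after the loop dead code it is free to delete, silently.
fn first_zero(table: &[u8; LEN]) -> Option<usize> {
    let mut i = 0;
    while i <= LEN {
        if unsafe { *table.get_unchecked(i) } == 0 {
            return Some(i);
        }
        i += 1;
    }
    None // "jackpot": unreachable without UB, so it may simply be removed
}
```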
It's stupid, but it's the world we live in: optimizers are usually a black box you import and feed the code in a format they understand; they then proceed to optimize it, and if they find a glaring bug they won't tell you, god forbid, they will just mangle the code in unspecified ways and spit out what's left. Before writing an optimizer, I did not know this could happen, and I did not know it is a problem I pay for with my time, spent figuring out why nothing happens when I run the program.
But wait, it's worse! Since optimizers won't ever share the fact that you are stupid, we end up with other people painstakingly writing complex linters that do a shitty job of detecting the things that matter, and instead whine about style and other bullcrap (and they suck even at that). If the people who write linters and the people who write optimizers swapped roles, I would be ranting about optimizers instead.
And so, this is the area where I want to innovate: let's report the dead code to the frontend, and let the compiler frontend filter out the noise and show the relevant information in the diagnostics. Refuse to compile the program if you write `i /= 0`. Refuse to compile if you write `arr[arr.len]`. This is the level of stupidity the optimizer sees once it normalizes your code, yet it proceeds to protect your feelings. My goal is for hblang to relay this to you as much as possible. If we can query for optimizations, we can query for bugs too.

View file

@ -13,7 +13,7 @@ path = "src/fuzz_main.rs"
[dependencies]
hbbytecode = { workspace = true, features = ["disasm"] }
hbvm = { workspace = true, features = ["nightly"] }
hbvm = { workspace = true, features = ["nightly", "alloc"] }
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
log = "0.4.22"

File diff suppressed because one or more lines are too long

View file

@ -314,7 +314,7 @@ impl Backend for HbvmBackend {
&& self
.jump_relocs
.last()
.map_or(true, |&(r, _)| self.offsets[r as usize] as usize != self.code.len())
.is_none_or(|&(r, _)| self.offsets[r as usize] as usize != self.code.len())
{
self.code.truncate(self.code.len() - 5);
self.ret_relocs.pop();
@ -606,9 +606,8 @@ impl TokenKind {
Some(ops[size.ilog2() as usize])
}
fn unop(&self, dst: ty::Id, src: ty::Id) -> Option<fn(u8, u8) -> EncodedInstr> {
let src_idx =
src.simple_size().unwrap_or_else(|| panic!("{:?}", src.expand())).ilog2() as usize;
fn unop(&self, dst: ty::Id, src: ty::Id, tys: &Types) -> Option<fn(u8, u8) -> EncodedInstr> {
let src_idx = tys.size_of(src).ilog2() as usize;
Some(match self {
Self::Sub => [
|a, b| sub8(a, reg::ZERO, b),
@ -655,6 +654,14 @@ enum PLoc {
Ref(Reg, u32),
}
impl PLoc {
fn reg(self) -> u8 {
match self {
PLoc::Reg(r, _) | PLoc::WideReg(r, _) | PLoc::Ref(r, _) => r,
}
}
}
struct ParamAlloc(Range<Reg>);
impl ParamAlloc {
@ -690,42 +697,7 @@ fn binary_prelude(to: &mut Vec<u8>) {
#[derive(Default)]
pub struct LoggedMem {
pub mem: hbvm::mem::HostMemory,
op_buf: Vec<hbbytecode::Oper>,
disp_buf: String,
prev_instr: Option<hbbytecode::Instr>,
}
impl LoggedMem {
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
let novm: *const hbvm::Vm<Self, 0> = core::ptr::null();
let offset = core::ptr::addr_of!((*novm).memory) as usize;
let regs = unsafe {
&*core::ptr::addr_of!(
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
.registers
)
};
let mut bytes = core::slice::from_raw_parts(
(addr.get() - 1) as *const u8,
core::mem::size_of::<T>() + 1,
);
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
log::trace!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
}
logger: hbvm::mem::InstrLogger,
}
impl hbvm::mem::Memory for LoggedMem {
@ -758,19 +730,13 @@ impl hbvm::mem::Memory for LoggedMem {
}
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
if log::log_enabled!(log::Level::Trace) {
if core::any::TypeId::of::<u8>() == core::any::TypeId::of::<T>() {
if let Some(instr) = self.prev_instr {
self.display_instr::<()>(instr, addr);
}
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
} else {
let instr = self.prev_instr.take().unwrap();
self.display_instr::<T>(instr, addr);
}
self.mem.prog_read(addr)
}
self.mem.prog_read(addr)
fn log_instr(&mut self, at: hbvm::mem::Address, regs: &[hbvm::value::Value]) {
log::trace!("read-typed: {:x}: {}", at.get(), unsafe {
self.logger.display_instr(at, regs)
});
}
}

View file

@ -1,4 +1,5 @@
use {
super::ParamAlloc,
crate::{
backend::hbvm::{
reg::{self, Reg},
@ -23,7 +24,7 @@ impl HbvmBackend {
tys: &Types,
files: &EntSlice<Module, parser::Ast>,
) -> (usize, bool) {
let tail = Function::build(nodes, tys, &mut self.ralloc, sig);
let tail = FunctionBuilder::build(nodes, tys, &mut self.ralloc, sig);
let strip_load = |value| match nodes[value].kind {
Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
@ -32,7 +33,9 @@ impl HbvmBackend {
let mut res = mem::take(&mut self.ralloc);
Regalloc::run(nodes, tys, &mut res);
let special_reg_count = 13u8;
Regalloc::run(nodes, tys, &mut res, special_reg_count as _);
'_open_function: {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
@ -40,12 +43,12 @@ impl HbvmBackend {
}
if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 {
res.node_to_reg[MEM as usize] = res.bundles.len() as u8 + 1;
res.bundles.push(Bundle::new(0));
res.node_to_reg[MEM as usize] = res.general_bundles.len() as u8 + 1;
res.general_bundles.push(Bundle::default());
}
let reg_offset = if tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
let bundle_count = res.bundles.len() + (reg_offset as usize);
let reg_offset = if tail { special_reg_count } else { reg::RET_ADDR + 1 };
let bundle_count = res.general_bundles.len() + (reg_offset as usize);
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
if *r == u8::MAX {
@ -325,6 +328,7 @@ impl HbvmBackend {
node.ty,
tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty),
tys,
)
.unwrap_or_else(|| {
panic!(
@ -392,8 +396,9 @@ impl HbvmBackend {
todo!("unhandled operator: {op}");
}
}
Kind::Call { args, func } => {
Kind::Call { args, func, .. } => {
let (ret, mut parama) = tys.parama(node.ty);
debug_assert!(node.ty != ty::Id::NEVER || ret.is_none());
if let Some(PLoc::Ref(r, ..)) = ret {
self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap())))
}
@ -433,12 +438,15 @@ impl HbvmBackend {
self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size));
}
match ret {
Some(PLoc::WideReg(..)) => {}
Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
None | Some(PLoc::Ref(..)) => {}
//match ret {
// Some(PLoc::WideReg(..)) => {}
// Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
// Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
// None | Some(PLoc::Ref(..)) => {}
//}
}
Kind::RetVal => {
self.emit_cp(atr(nid), reg::RET);
}
Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4);
@ -517,7 +525,7 @@ impl HbvmBackend {
if tail {
bundle_count.saturating_sub(reg::RET_ADDR as _)
} else {
self.ralloc.bundles.len()
self.ralloc.general_bundles.len()
},
tail,
)
@ -530,7 +538,7 @@ impl HbvmBackend {
}
}
struct Function<'a> {
struct FunctionBuilder<'a> {
sig: Sig,
tail: bool,
nodes: &'a Nodes,
@ -538,7 +546,7 @@ struct Function<'a> {
func: &'a mut Res,
}
impl core::fmt::Debug for Function<'_> {
impl core::fmt::Debug for FunctionBuilder<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
for block in &self.func.blocks {
writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
@ -551,7 +559,7 @@ impl core::fmt::Debug for Function<'_> {
}
}
impl<'a> Function<'a> {
impl<'a> FunctionBuilder<'a> {
fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
func.blocks.clear();
func.instrs.clear();
@ -670,11 +678,14 @@ impl<'a> Function<'a> {
self.emit_node(o);
}
}
Kind::Call { func, .. } => {
Kind::Call { func, unreachable, .. } => {
self.tail &= func == ty::Func::ECA;
if unreachable {
self.close_block(nid);
self.emit_node(node.outputs[0]);
} else {
self.add_instr(nid);
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
@ -684,6 +695,7 @@ impl<'a> Function<'a> {
}
}
}
}
Kind::CInt { value: 0 } if self.nodes.is_hard_zero(nid) => {}
Kind::CInt { .. }
| Kind::BinOp { .. }
@ -691,6 +703,7 @@ impl<'a> Function<'a> {
| Kind::Global { .. }
| Kind::Load { .. }
| Kind::Stre
| Kind::RetVal
| Kind::Stck => self.add_instr(nid),
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {}
Kind::Assert { .. } => unreachable!(),
@ -761,7 +774,13 @@ impl Nodes {
nid
}
fn uses_of(&self, nid: Nid, types: &Types, stack: &mut Vec<Nid>, buf: &mut Vec<(Nid, Nid)>) {
fn uses_of(
&self,
nid: Nid,
types: &Types,
stack: &mut Vec<Nid>,
buf: &mut Vec<(Nid, Nid, Reg)>,
) {
debug_assert!(stack.is_empty());
debug_assert!(buf.is_empty());
@ -777,13 +796,38 @@ impl Nodes {
continue;
}
if self.is_unlocked(o) {
buf.push((self.use_block_of(exp, o), o));
buf.push((self.use_block_of(exp, o), o, self.use_reg_of(exp, o)));
} else {
stack.push(o);
}
}
}
}
#[expect(unused)]
fn init_loc_of(&self, def: Nid, types: &Types) -> Reg {
if self[def].kind == Kind::Arg {
let mut parama = ParamAlloc(0..11);
let (_, ploc) = self[VOID]
.outputs
.iter()
.skip(ARG_START)
.map(|&n| (n, parama.next(self[n].ty, types)))
.find(|&(n, _)| n == def)
.unwrap();
return ploc.unwrap().reg();
}
255
}
#[expect(unused)]
fn use_reg_of(&self, def: Nid, usage: Nid) -> Reg {
//if matches!(self[usage].kind, Kind::Return { .. }) {}
255
}
}
struct Regalloc<'a> {
@ -806,27 +850,35 @@ impl<'a> Regalloc<'a> {
self.res.backrefs[nid as usize]
}
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res) {
Self { nodes: ctx, tys, res }.run_low();
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res, special_count: usize) {
Self { nodes: ctx, tys, res }.run_low(special_count);
}
fn run_low(&mut self) {
self.res.bundles.clear();
fn run_low(&mut self, #[expect(unused)] special_count: usize) {
self.res.general_bundles.clear();
self.res.node_to_reg.clear();
#[cfg(debug_assertions)]
self.res.marked.clear();
self.res.node_to_reg.resize(self.nodes.vreg_count(), 0);
self.res.call_set.clear();
for (i, &instr) in self.res.instrs.iter().enumerate() {
if self.nodes[instr].kind.is_call() {
self.res.call_set.add_one(i);
}
}
debug_assert!(self.res.dfs_buf.is_empty());
let mut uses_buf = Vec::new();
let mut bundle = Bundle::new(self.res.instrs.len());
let mut range_buf = Vec::new();
let mut bundle = Bundle::default();
self.res.visited.clear(self.nodes.len());
for i in (0..self.res.blocks.len()).rev() {
for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
if self.res.visited.set(a) {
self.append_bundle(a, &mut bundle, None, &mut uses_buf);
self.append_bundle(a, &mut bundle, None, &mut uses_buf, &mut range_buf);
}
for r in rest {
@ -839,6 +891,7 @@ impl<'a> Regalloc<'a> {
&mut bundle,
Some(self.res.node_to_reg[a as usize] as usize - 1),
&mut uses_buf,
&mut range_buf,
);
}
}
@ -849,7 +902,7 @@ impl<'a> Regalloc<'a> {
if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 {
continue;
}
self.append_bundle(inst, &mut bundle, None, &mut uses_buf);
self.append_bundle(inst, &mut bundle, None, &mut uses_buf, &mut range_buf);
}
self.res.instrs = instrs;
}
@ -859,12 +912,16 @@ impl<'a> Regalloc<'a> {
inst: Nid,
tmp: &mut Bundle,
prefered: Option<usize>,
uses_buf: &mut Vec<(Nid, Nid)>,
uses_buf: &mut Vec<(Nid, Nid, Reg)>,
range_buf: &mut Vec<Range<usize>>,
) {
let dom = self.nodes.idom_of(inst);
self.res.dfs_seem.clear(self.nodes.len());
self.nodes.uses_of(inst, self.tys, &mut self.res.dfs_buf, uses_buf);
for (cursor, uinst) in uses_buf.drain(..) {
let mut prefered_reg = reg::ZERO;
for (cursor, uinst, reg) in uses_buf.drain(..) {
prefered_reg = prefered_reg.min(reg);
if !self.res.dfs_seem.set(uinst) {
continue;
}
@ -894,8 +951,22 @@ impl<'a> Regalloc<'a> {
range.end = new;
debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range);
tmp.add(range);
range_buf.push(range)
});
range_buf.sort_unstable_by_key(|r| r.start);
range_buf.dedup_by(|a, b| {
if b.end == a.start {
b.end = a.end;
true
} else {
false
}
});
for range in range_buf.drain(..) {
tmp.add(range);
}
}
if tmp.is_empty() {
@ -904,23 +975,23 @@ impl<'a> Regalloc<'a> {
}
if let Some(prefered) = prefered
&& !self.res.bundles[prefered].overlaps(tmp)
&& !self.res.general_bundles[prefered].overlaps(tmp)
{
self.res.bundles[prefered].merge(tmp);
self.res.general_bundles[prefered].merge(tmp);
tmp.clear();
self.res.node_to_reg[inst as usize] = prefered as Reg + 1;
return;
}
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
match self.res.general_bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
Some((i, other)) => {
other.merge(tmp);
tmp.clear();
self.res.node_to_reg[inst as usize] = i as Reg + 1;
}
None => {
self.res.bundles.push(tmp.take());
self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
self.res.general_bundles.push(tmp.take());
self.res.node_to_reg[inst as usize] = self.res.general_bundles.len() as Reg;
}
}
}
@ -971,7 +1042,8 @@ pub(super) struct Res {
instrs: Vec<Nid>,
backrefs: Vec<u16>,
bundles: Vec<Bundle>,
general_bundles: Vec<Bundle>,
call_set: Bundle,
node_to_reg: Vec<Reg>,
visited: BitSet,
@ -982,37 +1054,83 @@ pub(super) struct Res {
}
struct Bundle {
taken: Vec<bool>,
start: usize,
end: usize,
usage: BitSet,
}
impl Default for Bundle {
fn default() -> Self {
Self { start: usize::MAX, end: 0, usage: Default::default() }
}
}
impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) {
self.taken[range].fill(true);
debug_assert!(!range.is_empty());
debug_assert!(range.start / BitSet::UNIT >= self.start || self.start == usize::MAX);
self.start = self.start.min(range.start / BitSet::UNIT);
self.end = self.end.max(range.end.div_ceil(BitSet::UNIT));
let proj_range =
range.start - self.start * BitSet::UNIT..range.end - self.start * BitSet::UNIT;
self.usage.set_range(proj_range)
}
fn overlaps(&self, other: &Self) -> bool {
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
fn overlaps(&self, othr: &Self) -> bool {
let overlap = self.start.max(othr.start)..self.end.min(othr.end);
if overlap.start >= overlap.end {
return false;
}
fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(other));
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
let [mut sslot, mut oslot] = [0, 0];
let sunits =
&self.usage.units(&mut sslot)[overlap.start - self.start..overlap.end - self.start];
let ounits =
&othr.usage.units(&mut oslot)[overlap.start - othr.start..overlap.end - othr.start];
debug_assert_eq!(sunits.len(), ounits.len());
let res = sunits.iter().zip(ounits).any(|(a, b)| (a & b) != 0);
res
}
fn merge(&mut self, othr: &Self) {
debug_assert!(!self.overlaps(othr));
debug_assert!(self.start <= othr.start || self.start == usize::MAX);
self.usage.reserve((othr.end - self.start) * BitSet::UNIT);
self.start = self.start.min(othr.start);
self.end = self.end.max(othr.end);
let sunits =
&mut self.usage.units_mut().unwrap()[othr.start - self.start..othr.end - self.start];
let mut oslot = 0;
let ounits = othr.usage.units(&mut oslot);
sunits.iter_mut().zip(ounits).for_each(|(a, b)| *a |= *b);
}
fn clear(&mut self) {
self.taken.fill(false);
self.start = usize::MAX;
self.end = 0;
self.usage.clear_as_is();
}
fn is_empty(&self) -> bool {
!self.taken.contains(&true)
self.end == 0
}
fn take(&mut self) -> Self {
mem::replace(self, Self::new(self.taken.len()))
let mut new = Self { start: 0, ..Self::default() };
new.merge(self);
self.clear();
new
}
fn add_one(&mut self, i: usize) {
self.start = self.start.min(i / BitSet::UNIT);
self.end = self.end.max(i.div_ceil(BitSet::UNIT));
self.usage.set(i as _);
}
}

View file

@ -2,7 +2,8 @@ use {
crate::{
lexer::{self, Lexer, TokenKind},
parser::{
self, CommentOr, CtorField, EnumField, Expr, FieldList, Poser, Radix, StructField,
self, CommentOr, CtorField, EnumField, Expr, FieldList, ListKind, Poser, Radix,
StructField, UnionField,
},
},
core::{
@ -50,27 +51,29 @@ enum TokenGroup {
Ctor,
}
fn token_group(kind: TokenKind) -> TokenGroup {
use {crate::lexer::TokenKind::*, TokenGroup as TG};
match kind {
impl TokenKind {
fn to_higlight_group(self) -> TokenGroup {
use {TokenGroup as TG, TokenKind::*};
match self {
BSlash | Pound | Eof | Ct => TG::Blank,
Comment => TG::Comment,
Directive => TG::Directive,
Colon => TG::Colon,
Semi | Comma => TG::Comma,
Dot => TG::Dot,
Ctor | Tupl | TArrow => TG::Ctor,
Ctor | Arr | Tupl | TArrow | Range => TG::Ctor,
LParen | RParen => TG::Paren,
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
Number | Float => TG::Number,
Under | CtIdent | Ident => TG::Identifier,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Shr
| Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ModAss
| ShrAss | ShlAss => TG::Assign,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl
| Shr | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss
| ModAss | ShrAss | ShlAss => TG::Assign,
DQuote | Quote => TG::String,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Struct
| Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die
| Struct | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
}
}
}
@ -88,7 +91,7 @@ pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
let start = token.start as usize;
let end = token.end as usize;
source[..start].fill(0);
source[start..end].fill(token_group(token.kind) as u8);
source[start..end].fill(token.kind.to_higlight_group() as u8);
source = &mut source[end..];
}
len
@ -221,12 +224,12 @@ impl<'a> Formatter<'a> {
f.write_str(sep)?;
}
if let Some(expr) = list.get(i + 1)
&& let Some(rest) = self.source.get(expr.posi() as usize..)
&& let Some(prev) = self.source.get(..expr.posi() as usize)
{
if sep.is_empty() && insert_needed_semicolon(rest) {
if sep.is_empty() && prev.trim_end().ends_with(';') {
f.write_str(";")?;
}
if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
if count_trailing_newlines(prev) > 1 {
f.write_str("\n")?;
}
}
@ -304,10 +307,6 @@ impl<'a> Formatter<'a> {
}
match *expr {
Expr::Ct { value, .. } => {
f.write_str("$: ")?;
self.fmt(value, f)
}
Expr::Defer { value, .. } => {
f.write_str("defer ")?;
self.fmt(value, f)
@ -323,6 +322,16 @@ impl<'a> Formatter<'a> {
f.write_str(".")?;
f.write_str(field)
}
Expr::Range { start, end, .. } => {
if let Some(start) = start {
self.fmt(start, f)?;
}
f.write_str("..")?;
if let Some(end) = end {
self.fmt(end, f)?;
}
Ok(())
}
Expr::Directive { name, args, .. } => {
f.write_str("@")?;
f.write_str(name)?;
@ -339,10 +348,15 @@ impl<'a> Formatter<'a> {
"struct",
trailing_comma,
fields,
|s, StructField { name, ty, .. }, f| {
|s, StructField { name, ty, default_value, .. }, f| {
f.write_str(name)?;
f.write_str(": ")?;
s.fmt(ty, f)
s.fmt(ty, f)?;
if let Some(deva) = default_value {
f.write_str(" = ")?;
s.fmt(deva, f)?;
}
Ok(())
},
)
}
@ -351,7 +365,7 @@ impl<'a> Formatter<'a> {
"union",
trailing_comma,
fields,
|s, StructField { name, ty, .. }, f| {
|s, UnionField { name, ty, .. }, f| {
f.write_str(name)?;
f.write_str(": ")?;
s.fmt(ty, f)
@ -366,7 +380,7 @@ impl<'a> Formatter<'a> {
),
Expr::Ctor { ty, fields, trailing_comma, .. } => {
if let Some(ty) = ty {
self.fmt_paren(ty, f, unary)?;
self.fmt_paren(ty, f, postfix)?;
}
f.write_str(".{")?;
self.fmt_list(
@ -385,38 +399,43 @@ impl<'a> Formatter<'a> {
},
)
}
Expr::Tupl {
Expr::List {
pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
fields,
trailing_comma,
} if value as usize == fields.len() => self.fmt(
&Expr::Tupl {
&Expr::List {
pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: None, item }),
fields,
trailing_comma,
},
f,
),
Expr::Tupl { ty, fields, trailing_comma, .. } => {
Expr::List { ty, kind: term, fields, trailing_comma, .. } => {
if let Some(ty) = ty {
self.fmt_paren(ty, f, unary)?;
self.fmt_paren(ty, f, postfix)?;
}
f.write_str(".(")?;
self.fmt_list(f, trailing_comma, ")", ",", fields, Self::fmt)
let (start, end) = match term {
ListKind::Tuple => (".(", ")"),
ListKind::Array => (".[", "]"),
};
f.write_str(start)?;
self.fmt_list(f, trailing_comma, end, ",", fields, Self::fmt)
}
Expr::Slice { item, size, .. } => {
f.write_str("[")?;
self.fmt(item, f)?;
if let Some(size) = size {
f.write_str("; ")?;
self.fmt(size, f)?;
}
f.write_str("]")
f.write_str("]")?;
self.fmt_paren(item, f, unary)
}
Expr::Index { base, index } => {
self.fmt(base, f)?;
self.fmt_paren(base, f, postfix)?;
f.write_str("[")?;
self.fmt(index, f)?;
f.write_str("]")
@ -539,7 +558,7 @@ impl<'a> Formatter<'a> {
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
prev = &prev[..exact_bound as usize + estimate_bound];
if preserve_newlines(prev) > 0 {
if count_trailing_newlines(prev) > 0 {
f.write_str("\n")?;
for _ in 0..self.depth + 1 {
f.write_str("\t")?;
@ -547,7 +566,9 @@ impl<'a> Formatter<'a> {
f.write_str(op.name())?;
f.write_str(" ")?;
} else {
if op != TokenKind::Colon {
f.write_str(" ")?;
}
f.write_str(op.name())?;
f.write_str(" ")?;
}
@ -562,15 +583,10 @@ impl<'a> Formatter<'a> {
}
}
pub fn preserve_newlines(source: &str) -> usize {
pub fn count_trailing_newlines(source: &str) -> usize {
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
}
pub fn insert_needed_semicolon(source: &str) -> bool {
let kind = lexer::Lexer::new(source).eat().kind;
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
}
impl core::fmt::Display for parser::Ast {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt_file(self.exprs(), &self.file, f)
@ -581,14 +597,14 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
for (i, expr) in exprs.iter().enumerate() {
Formatter::new(file).fmt(expr, f)?;
if let Some(expr) = exprs.get(i + 1)
&& let Some(rest) = file.get(expr.pos() as usize..)
&& let Some(prefix) = file.get(..expr.pos() as usize)
{
if insert_needed_semicolon(rest) {
write!(f, ";")?;
if prefix.trim_end().ends_with(';') {
f.write_str(";")?;
}
if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
writeln!(f)?;
if count_trailing_newlines(prefix) > 1 {
f.write_str("\n")?;
}
}

View file

@ -177,8 +177,9 @@ pub fn run_compiler(
if options.dump_asm {
let mut disasm = String::new();
codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()))?;
let err = codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()));
*out = disasm.into_bytes();
err?
}
}

View file

@ -138,7 +138,9 @@ pub enum TokenKind {
Ctor,
Tupl,
Arr,
TArrow,
Range,
Or,
And,
@ -347,7 +349,9 @@ gen_token_kind! {
#[punkt]
Ctor = ".{",
Tupl = ".(",
Arr = ".[",
TArrow = "=>",
Range = "..",
// #define OP: each `#[prec]` delimeters a level of precedence from lowest to highest
#[ops]
#[prec]
@ -430,6 +434,19 @@ impl<'a> Lexer<'a> {
Lexer { pos: self.pos, source: self.source }.eat()
}
fn peek_n<const N: usize>(&self) -> Option<&[u8; N]> {
if core::intrinsics::unlikely(self.pos as usize + N > self.source.len()) {
None
} else {
Some(unsafe {
self.source
.get_unchecked(self.pos as usize..self.pos as usize + N)
.first_chunk()
.unwrap_unchecked()
})
}
}
fn peek(&self) -> Option<u8> {
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
None
@ -498,7 +515,11 @@ impl<'a> Lexer<'a> {
self.advance();
}
if self.advance_if(b'.') {
if self
.peek_n()
.map_or_else(|| self.peek() == Some(b'.'), |&[a, b]| a == b'.' && b != b'.')
{
self.pos += 1;
while let Some(b'0'..=b'9') = self.peek() {
self.advance();
}
@ -550,6 +571,8 @@ impl<'a> Lexer<'a> {
}
b'.' if self.advance_if(b'{') => T::Ctor,
b'.' if self.advance_if(b'(') => T::Tupl,
b'.' if self.advance_if(b'[') => T::Arr,
b'.' if self.advance_if(b'.') => T::Range,
b'=' if self.advance_if(b'>') => T::TArrow,
b'&' if self.advance_if(b'&') => T::And,
b'|' if self.advance_if(b'|') => T::Or,

View file

@ -24,7 +24,8 @@
iter_next_chunk,
pointer_is_aligned_to,
maybe_uninit_fill,
array_chunks
array_chunks,
array_windows
)]
#![warn(clippy::dbg_macro)]
#![expect(internal_features)]
@ -126,6 +127,8 @@ pub mod backend {
mod utils;
mod debug {
use core::fmt::Debug;
pub fn panicking() -> bool {
#[cfg(feature = "std")]
{
@ -138,14 +141,14 @@ mod debug {
}
#[cfg(all(debug_assertions, feature = "std"))]
pub type Trace = std::rc::Rc<std::backtrace::Backtrace>;
pub type Trace = std::rc::Rc<(std::backtrace::Backtrace, std::string::String)>;
#[cfg(not(all(debug_assertions, feature = "std")))]
pub type Trace = ();
pub fn trace() -> Trace {
pub fn trace(_hint: impl Debug) -> Trace {
#[cfg(all(debug_assertions, feature = "std"))]
{
std::rc::Rc::new(std::backtrace::Backtrace::capture())
std::rc::Rc::new((std::backtrace::Backtrace::capture(), format!("{_hint:?}")))
}
#[cfg(not(all(debug_assertions, feature = "std")))]
{}

View file

@ -97,6 +97,7 @@ impl Nodes {
debug_assert_ne!(next, 0);
if matches!(self[cursor].kind, Kind::Then | Kind::Else) {
debug_assert_eq!(self[next].kind, Kind::If);
debug_assert_eq!(self[next].ty, ty::Id::VOID);
let other = self[next].outputs[(self[next].outputs[0] == cursor) as usize];
self[other].loop_depth.set(depth - 1);
}
@ -168,14 +169,10 @@ impl Nodes {
let mut deepest = self[node].inputs[0];
for &inp in self[node].inputs[1..].iter() {
if self.idepth(inp, Some(scheds)) > self.idepth(deepest, Some(scheds)) {
if self[inp].kind.is_call() {
deepest = inp;
} else {
debug_assert!(!self.is_cfg(inp));
deepest = self.idom(inp, Some(scheds));
}
}
}
scheds[node as usize] = deepest;
}
@ -198,8 +195,8 @@ impl Nodes {
for &node in rpo.iter().rev() {
self.loop_depth(node, Some(scheds));
for i in 0..self[node].inputs.len() {
self.push_up_impl(self[node].inputs[i], visited, scheds);
for &i in self[node].inputs.iter() {
self.push_up_impl(i, visited, scheds);
}
if matches!(self[node].kind, Kind::Loop | Kind::Region) {
@ -216,13 +213,13 @@ impl Nodes {
self.iter()
.map(|(n, _)| n)
.filter(|&n| !visited.get(n)
&& !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops))
&& !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal))
.collect::<Vec<_>>(),
vec![],
"{:?}",
self.iter()
.filter(|&(n, nod)| !visited.get(n)
&& !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops))
&& !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal))
.collect::<Vec<_>>()
);
@ -287,6 +284,7 @@ impl Nodes {
let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap();
outputs.swap(cfg_idx, 0);
for &o in outputs.iter() {
if (!self.is_cfg(o)
&& self[o].outputs.iter().any(|&oi| {
@ -297,9 +295,7 @@ impl Nodes {
continue;
}
let mut cursor = buf.len();
for &o in outputs.iter().filter(|&&n| n == o) {
buf.push(o);
}
while let Some(&n) = buf.get(cursor) {
for &i in &self[n].inputs[1..] {
if fromc == self[i].inputs.first()
@ -310,15 +306,17 @@ impl Nodes {
})
&& seen.set(i)
{
for &o in outputs.iter().filter(|&&n| n == i) {
buf.push(o);
}
buf.push(i);
}
}
cursor += 1;
}
}
buf[1..].sort_by_key(|&n| {
self[n].has_no_value() || !self[n].outputs.iter().all(|&o| self[o].kind == Kind::Phi)
});
debug_assert_eq!(
outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(),
vec![],
@ -701,7 +699,7 @@ impl Nodes {
if self.free == Nid::MAX {
self.free = self.values.len() as _;
self.values.push(Err((Nid::MAX, debug::trace())));
self.values.push(Err((Nid::MAX, debug::trace(""))));
}
let free = self.free;
@ -777,13 +775,11 @@ impl Nodes {
}
self.remove_node_lookup(target);
let trace = debug::trace(&self.values[target as usize]);
if cfg!(debug_assertions) {
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, debug::trace())))
.unwrap();
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, trace))).unwrap();
} else {
mem::replace(&mut self.values[target as usize], Err((self.free, debug::trace())))
.unwrap();
mem::replace(&mut self.values[target as usize], Err((self.free, trace))).unwrap();
self.free = target;
}
@ -1565,6 +1561,7 @@ impl Nodes {
K::Start => {}
_ if self.is_cfg(target) && self.idom(target, None) == NEVER => panic!(),
K::Entry
| K::RetVal
| K::Mem
| K::Loops
| K::End
@ -1638,6 +1635,7 @@ impl Nodes {
}
pub fn replace(&mut self, target: Nid, with: Nid) {
self.patch_aclass(target, with);
debug_assert_ne!(target, with, "{:?}", self[target]);
for out in self[target].outputs.clone() {
let index = self[out].inputs.iter().position(|&p| p == target).unwrap();
@ -1714,7 +1712,7 @@ impl Nodes {
Kind::BinOp { op } | Kind::UnOp { op } => {
write!(out, "{:>4}: ", op.name())
}
Kind::Call { func, args: _ } => {
Kind::Call { func, args: _, unreachable: _ } => {
write!(out, "call: {func} {} ", self[node].depth.get())
}
Kind::Global { global } => write!(out, "glob: {global:<5}"),
@ -1727,6 +1725,7 @@ impl Nodes {
Kind::Mem => write!(out, " mem: "),
Kind::Loops => write!(out, "loops: "),
Kind::Join => write!(out, "join: "),
Kind::RetVal => write!(out, "rval: "),
}?;
if self[node].kind != Kind::Loop && self[node].kind != Kind::Region {
@ -1981,6 +1980,25 @@ impl Nodes {
self[blocker].peep_triggers.push(target);
}
}
fn patch_aclass(&mut self, target: Nid, with: Nid) {
let (_, region) = self.aclass_index(target);
if region == 0 {
return;
}
fn patch_aclass_inner(s: &mut Nodes, root: Nid, with: Nid, matches: Nid) {
for out in s[root].outputs.clone() {
let (_, region) = s.aclass_index(out);
if region == matches {
s.pass_aclass(with, out);
patch_aclass_inner(s, out, with, matches);
}
}
}
patch_aclass_inner(self, target, with, target);
}
}
impl ops::Index<Nid> for Nodes {
@ -2050,8 +2068,7 @@ impl Node {
}
pub fn has_no_value(&self) -> bool {
(self.kind.is_cfg() && (!self.kind.is_call() || self.ty == ty::Id::VOID))
|| matches!(self.kind, Kind::Stre)
self.kind.is_cfg() || matches!(self.kind, Kind::Stre)
}
}
@ -2086,6 +2103,12 @@ pub enum Kind {
Return {
file: ty::Module,
},
// [ctrl, ...args]
Call {
unreachable: bool,
func: ty::Func,
args: ty::List,
},
// [ctrl]
Die,
// [ctrl]
@ -2107,11 +2130,7 @@ pub enum Kind {
Global {
global: ty::Global,
},
// [ctrl, ...args]
Call {
func: ty::Func,
args: ty::Tuple,
},
RetVal,
// [ctrl, cond, value]
Assert {
kind: AssertKind,
@ -2137,7 +2156,9 @@ impl Kind {
}
fn is_pinned(&self) -> bool {
self.is_cfg() || self.is_at_start() || matches!(self, Self::Phi | Kind::Assert { .. })
self.is_cfg()
|| self.is_at_start()
|| matches!(self, Self::Phi | Self::Assert { .. } | Self::RetVal)
}
fn is_at_start(&self) -> bool {
@ -2163,6 +2184,7 @@ impl Kind {
fn ends_basic_block(&self) -> bool {
matches!(self, Self::Return { .. } | Self::If | Self::End | Self::Die)
|| matches!(self, Kind::Call { unreachable: true, .. })
}
pub fn starts_basic_block(&self) -> bool {

View file

@ -31,7 +31,7 @@ pub enum FileKind {
Embed,
}
trait Trans {
pub trait Trans {
fn trans(self) -> Self;
}
@ -80,6 +80,7 @@ struct ScopeIdent {
declared: bool,
ordered: bool,
used: bool,
is_ct: bool,
flags: IdentFlags,
}
@ -196,8 +197,8 @@ impl<'a, 'b> Parser<'a, 'b> {
fn declare_rec(&mut self, expr: &Expr, top_level: bool) {
match *expr {
Expr::Ident { pos, id, is_first, .. } => {
self.declare(pos, id, !top_level, is_first || top_level)
Expr::Ident { pos, id, is_first, is_ct, .. } => {
self.declare(pos, id, !top_level, is_first || top_level, is_ct)
}
Expr::Ctor { fields, .. } => {
for CtorField { value, .. } in fields {
@ -208,7 +209,7 @@ impl<'a, 'b> Parser<'a, 'b> {
}
}
fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool) {
fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool, is_ct: bool) {
if !valid_order {
self.report(
pos,
@ -230,7 +231,7 @@ impl<'a, 'b> Parser<'a, 'b> {
);
return;
}
self.ctx.idents[index].is_ct = is_ct;
self.ctx.idents[index].ordered = ordered;
}
@ -267,6 +268,7 @@ impl<'a, 'b> Parser<'a, 'b> {
declared: false,
used: false,
ordered: false,
is_ct: false,
flags: 0,
});
(self.ctx.idents.len() - 1, self.ctx.idents.last_mut().unwrap(), true)
@ -276,7 +278,7 @@ impl<'a, 'b> Parser<'a, 'b> {
id.flags |= idfl::COMPTIME * is_ct as u32;
if id.declared && id.ordered && self.ns_bound > i {
id.flags |= idfl::COMPTIME;
self.ctx.captured.push(id.ident);
self.ctx.captured.push(CapturedIdent { id: id.ident, is_ct: id.is_ct });
}
(id.ident, bl)
@ -287,6 +289,10 @@ impl<'a, 'b> Parser<'a, 'b> {
}
fn unit_expr(&mut self) -> Option<Expr<'a>> {
self.unit_expr_low(true)
}
fn unit_expr_low(&mut self, eat_tail: bool) -> Option<Expr<'a>> {
use {Expr as E, TokenKind as T};
if matches!(
@ -302,7 +308,6 @@ impl<'a, 'b> Parser<'a, 'b> {
let prev_captured = self.ctx.captured.len();
let mut must_trail = false;
let mut expr = match token.kind {
T::Ct => E::Ct { pos, value: self.ptr_expr()? },
T::Defer => E::Defer { pos, value: self.ptr_expr()? },
T::Slf => E::Slf { pos },
T::Directive if self.lexer.slice(token.range()) == "use" => {
@ -378,10 +383,15 @@ impl<'a, 'b> Parser<'a, 'b> {
}
let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?;
let (ty, default_value) = match s.expr()? {
Expr::BinOp { left, op: T::Assign, right, .. } => (*left, Some(*right)),
ty => (ty, None),
};
Some(Some(StructField {
pos: name.start,
name: s.tok_str(name),
ty: s.expr()?,
ty,
default_value,
}))
})?,
captured: self.collect_captures(prev_boundary, prev_captured),
@ -395,11 +405,7 @@ impl<'a, 'b> Parser<'a, 'b> {
}
let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?;
Some(Some(StructField {
pos: name.start,
name: s.tok_str(name),
ty: s.expr()?,
}))
Some(Some(UnionField { pos: name.start, name: s.tok_str(name), ty: s.expr()? }))
})?,
captured: self.collect_captures(prev_boundary, prev_captured),
trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail,
@ -467,7 +473,7 @@ impl<'a, 'b> Parser<'a, 'b> {
self.collect_list(T::Comma, T::RParen, |s| {
let name = s.advance_ident()?;
let (id, _) = s.resolve_ident(name);
s.declare(name.start, id, true, true);
s.declare(name.start, id, true, true, name.kind == T::CtIdent);
s.expect_advance(T::Colon)?;
Some(Arg {
pos: name.start,
@ -485,14 +491,20 @@ impl<'a, 'b> Parser<'a, 'b> {
body: self.ptr_expr()?,
},
T::Ctor => self.ctor(pos, None),
T::Tupl => self.tupl(pos, None),
T::Tupl => self.tupl(pos, None, ListKind::Tuple),
T::Arr => self.tupl(pos, None, ListKind::Array),
T::LBrack => E::Slice {
item: self.ptr_unit_expr()?,
size: self.advance_if(T::Semi).then(|| self.ptr_expr()).trans()?,
pos: {
size: {
if self.advance_if(T::RBrack) {
None
} else {
let adv = self.ptr_expr()?;
self.expect_advance(T::RBrack)?;
pos
Some(adv)
}
},
item: self.arena.alloc(self.unit_expr_low(false)?),
pos,
},
T::Band | T::Mul | T::Xor | T::Sub | T::Que | T::Not | T::Dot => E::UnOp {
pos,
@ -544,9 +556,13 @@ impl<'a, 'b> Parser<'a, 'b> {
tok => self.report(token.start, format_args!("unexpected token: {tok}"))?,
};
if eat_tail {
loop {
let token = self.token;
if matches!(token.kind, T::LParen | T::Ctor | T::Dot | T::Tupl | T::LBrack) {
if matches!(
token.kind,
T::LParen | T::Ctor | T::Dot | T::Tupl | T::Arr | T::LBrack | T::Colon
) {
self.next();
}
@ -557,14 +573,56 @@ impl<'a, 'b> Parser<'a, 'b> {
trailing_comma: core::mem::take(&mut self.trailing_sep),
},
T::Ctor => self.ctor(token.start, Some(expr)),
T::Tupl => self.tupl(token.start, Some(expr)),
T::Tupl => self.tupl(token.start, Some(expr), ListKind::Tuple),
T::Arr => self.tupl(token.start, Some(expr), ListKind::Array),
T::LBrack => E::Index {
base: self.arena.alloc(expr),
index: {
let index = self.expr()?;
index: self.arena.alloc({
if self.advance_if(T::Range) {
let pos = self.token.start;
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: None, end: None }
} else {
let res = Expr::Range {
pos,
start: None,
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
self.arena.alloc(index)
res
}
} else {
let start = self.expr()?;
let pos = self.token.start;
if self.advance_if(T::Range) {
let start = self.arena.alloc(start);
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: Some(start), end: None }
} else {
let res = Expr::Range {
pos,
start: Some(start),
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
res
}
} else {
self.expect_advance(T::RBrack)?;
start
}
}
}),
},
T::Colon => E::BinOp {
left: {
self.declare_rec(&expr, false);
self.arena.alloc(expr)
},
pos,
op: T::Colon,
right: self.ptr_expr()?,
},
T::Dot => E::Field {
target: self.arena.alloc(expr),
@ -577,6 +635,7 @@ impl<'a, 'b> Parser<'a, 'b> {
_ => break,
}
}
}
if matches!(token.kind, T::Loop | T::LBrace | T::Fn | T::Struct) {
self.pop_scope(frame);
@ -585,11 +644,12 @@ impl<'a, 'b> Parser<'a, 'b> {
Some(expr)
}
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>) -> Expr<'a> {
Expr::Tupl {
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>, kind: ListKind) -> Expr<'a> {
Expr::List {
pos,
kind,
ty: ty.map(|ty| self.arena.alloc(ty)),
fields: self.collect_list(TokenKind::Comma, TokenKind::RParen, Self::expr),
fields: self.collect_list(TokenKind::Comma, kind.term(), Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep),
}
}
@ -639,7 +699,11 @@ impl<'a, 'b> Parser<'a, 'b> {
}))
}
fn collect_captures(&mut self, prev_captured: usize, prev_boundary: usize) -> &'a [Ident] {
fn collect_captures(
&mut self,
prev_captured: usize,
prev_boundary: usize,
) -> &'a [CapturedIdent] {
self.ns_bound = prev_boundary;
let captured = &mut self.ctx.captured[prev_captured..];
crate::quad_sort(captured, core::cmp::Ord::cmp);
@ -704,7 +768,9 @@ impl<'a, 'b> Parser<'a, 'b> {
) -> &'a [T] {
let mut trailing_sep = false;
let mut view = self.ctx.stack.view();
'o: while (keep_end && self.token.kind != end) || (!keep_end && !self.advance_if(end)) {
'o: while (keep_end && self.token.kind != end)
|| (!keep_end && !self.advance_if(end)) && self.token.kind != TokenKind::Eof
{
let val = match f(self) {
Some(val) => val,
None => {
@ -777,6 +843,9 @@ impl<'a, 'b> Parser<'a, 'b> {
#[track_caller]
fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
if log::log_enabled!(log::Level::Error) {
if self.ctx.errors.get_mut().len() > 1024 * 10 {
panic!("{}", self.ctx.errors.get_mut());
}
use core::fmt::Write;
writeln!(
self.ctx.errors.get_mut(),
@ -790,15 +859,19 @@ impl<'a, 'b> Parser<'a, 'b> {
fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) {
match e {
Expr::Ident { id, .. } => find_ident(&mut self.ctx.idents, id).flags |= flags,
Expr::Ident { id, .. } => {
if let Some(f) = find_ident(&mut self.ctx.idents, id) {
f.flags |= flags;
}
}
Expr::Field { target, .. } => self.flag_idents(*target, flags),
_ => {}
}
}
}
fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> &mut ScopeIdent {
idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).unwrap()
fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> Option<&mut ScopeIdent> {
idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).ok()
}
pub fn find_symbol(symbols: &[Symbol], id: Ident) -> &Symbol {
@ -879,11 +952,6 @@ generate_expr! {
/// `OP := grep for `#define OP:`
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Expr<'a> {
/// `'ct' Expr`
Ct {
pos: Pos,
value: &'a Self,
},
/// `'defer' Expr`
Defer {
pos: Pos,
@ -998,22 +1066,22 @@ generate_expr! {
Struct {
pos: Pos,
fields: FieldList<'a, StructField<'a>>,
captured: &'a [Ident],
captured: &'a [CapturedIdent],
trailing_comma: bool,
packed: bool,
},
/// `'union' LIST('{', ',', '}', Ident ':' Expr)`
Union {
pos: Pos,
fields: FieldList<'a, StructField<'a>>,
captured: &'a [Ident],
fields: FieldList<'a, UnionField<'a>>,
captured: &'a [CapturedIdent],
trailing_comma: bool,
},
/// `'enum' LIST('{', ',', '}', Ident)`
Enum {
pos: Pos,
variants: FieldList<'a, EnumField<'a>>,
captured: &'a [Ident],
captured: &'a [CapturedIdent],
trailing_comma: bool,
},
/// `[Expr] LIST('.{', ',', '}', Ident [':' Expr])`
@ -1024,8 +1092,9 @@ generate_expr! {
trailing_comma: bool,
},
/// `[Expr] LIST('.(', ',', ')', Ident [':' Expr])`
Tupl {
List {
pos: Pos,
kind: ListKind,
ty: Option<&'a Self>,
fields: &'a [Self],
trailing_comma: bool,
@ -1041,6 +1110,12 @@ generate_expr! {
base: &'a Self,
index: &'a Self,
},
/// `[ Expr ] .. [ Expr ]`
Range {
pos: u32,
start: Option<&'a Self>,
end: Option<&'a Self>,
},
/// `Expr '.' Ident`
Field {
target: &'a Self,
@ -1086,6 +1161,26 @@ generate_expr! {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)]
pub struct CapturedIdent {
pub id: Ident,
pub is_ct: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ListKind {
Tuple,
Array,
}
impl ListKind {
fn term(self) -> TokenKind {
match self {
ListKind::Tuple => TokenKind::RParen,
ListKind::Array => TokenKind::RBrack,
}
}
}
impl Expr<'_> {
pub fn declares(&self, iden: DeclId, source: &str) -> Option<Ident> {
match *self {
@ -1158,11 +1253,25 @@ impl Poser for EnumField<'_> {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct UnionField<'a> {
pub pos: Pos,
pub name: &'a str,
pub ty: Expr<'a>,
}
impl Poser for UnionField<'_> {
fn posi(&self) -> Pos {
self.pos
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct StructField<'a> {
pub pos: Pos,
pub name: &'a str,
pub ty: Expr<'a>,
pub default_value: Option<Expr<'a>>,
}
impl Poser for StructField<'_> {
@ -1227,9 +1336,9 @@ pub enum CommentOr<'a, T> {
Comment { literal: &'a str, pos: Pos },
}
impl<T: Copy> CommentOr<'_, T> {
pub fn or(&self) -> Option<T> {
match *self {
impl<T> CommentOr<'_, T> {
pub fn or(&self) -> Option<&T> {
match self {
CommentOr::Or(v) => Some(v),
CommentOr::Comment { .. } => None,
}
@ -1260,7 +1369,7 @@ pub struct Ctx {
symbols: Vec<Symbol>,
stack: StackAlloc,
idents: Vec<ScopeIdent>,
captured: Vec<Ident>,
captured: Vec<CapturedIdent>,
}
impl Ctx {
@ -1414,7 +1523,7 @@ pub fn find_decl<'a>(
id: DeclId,
) -> Option<(&'a Expr<'a>, Ident)> {
exprs.iter().find_map(|expr| match expr {
Expr::BinOp { left, op: TokenKind::Decl, .. } => {
Expr::BinOp { left, op: TokenKind::Decl | TokenKind::Colon, .. } => {
left.declares(id, file).map(|id| (expr, id))
}
_ => None,

File diff suppressed because it is too large

View file

@ -2,7 +2,7 @@ use {
crate::{
ctx_map,
lexer::TokenKind,
parser::{self, CommentOr, Expr, ExprRef, Pos},
parser::{self, CapturedIdent, CommentOr, Expr, ExprRef, Pos},
utils::{self, Ent, EntSlice, EntVec},
Ident,
},
@ -38,9 +38,9 @@ pub type Offset = u32;
pub type Size = u32;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Default, PartialOrd, Ord)]
pub struct Tuple(pub u32);
pub struct List(pub u32);
impl Tuple {
impl List {
const LEN_BITS: u32 = 5;
const LEN_MASK: usize = Self::MAX_LEN - 1;
const MAX_LEN: usize = 1 << Self::LEN_BITS;
@ -104,6 +104,12 @@ impl ArgIter {
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Id(NonZeroU32);
impl AsRef<Id> for Id {
fn as_ref(&self) -> &Id {
self
}
}
impl From<Id> for i64 {
fn from(value: Id) -> Self {
value.0.get() as _
@ -150,6 +156,7 @@ impl crate::ctx_map::CtxEntry for Id {
SymKey::Decl(gb.file.into(), gb.name)
}
Kind::Slice(s) => SymKey::Array(&ctx.slices[s]),
Kind::Tuple(t) => SymKey::Tuple(ctx.tuples[t].fields),
Kind::Module(_) | Kind::Builtin(_) => {
SymKey::Decl(Module::default().into(), Ident::INVALID)
}
@ -184,11 +191,13 @@ impl Id {
}
pub fn is_unsigned(self) -> bool {
matches!(self.repr(), U8..=UINT) || self.is_never()
matches!(self.repr(), U8..=UINT)
|| self.is_never()
|| matches!(self.expand(), Kind::Enum(_))
}
pub fn is_integer(self) -> bool {
matches!(self.repr(), U8..=INT) || self.is_never()
self.is_signed() || self.is_unsigned()
}
pub fn is_never(self) -> bool {
@ -264,22 +273,19 @@ impl Id {
}
pub(crate) fn loc(&self, tys: &Types) -> Loc {
use Kind as K;
match self.expand() {
Kind::Opt(o)
K::Opt(o)
if let ty = tys.ins.opts[o].base
&& ty.loc(tys) == Loc::Reg
&& (ty.is_pointer() || tys.size_of(ty) < 8) =>
{
Loc::Reg
}
Kind::Ptr(_) | Kind::Enum(_) | Kind::Builtin(_) => Loc::Reg,
Kind::Struct(_) | Kind::Union(_) if tys.size_of(*self) == 0 => Loc::Reg,
Kind::Struct(_) | Kind::Union(_) | Kind::Slice(_) | Kind::Opt(_) => Loc::Stack,
c @ (Kind::Func(_)
| Kind::Global(_)
| Kind::Module(_)
| Kind::Const(_)
| Kind::Template(_)) => {
K::Ptr(_) | K::Enum(_) | K::Builtin(_) => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) if tys.size_of(*self) == 0 => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) | K::Slice(_) | K::Opt(_) => Loc::Stack,
c @ (K::Func(_) | K::Global(_) | K::Module(_) | K::Const(_) | K::Template(_)) => {
unreachable!("{c:?}")
}
}
@ -380,6 +386,7 @@ builtin_type! {
INT;
F32;
F64;
ANY_TYPE;
}
macro_rules! type_kind {
@ -414,6 +421,12 @@ macro_rules! type_kind {
}
}
impl Id {
pub fn kind(self) -> u8 {
(self.repr() >> $name::FLAG_OFFSET) as _
}
}
$(
impl From<$variant> for $name {
fn from(value: $variant) -> Self {
@ -441,6 +454,7 @@ type_kind! {
pub enum Kind {
Builtin,
Struct,
Tuple,
Enum,
Union,
Ptr,
@ -449,8 +463,8 @@ type_kind! {
Func,
Template,
Global,
Module,
Const,
Module,
}
}
@ -505,31 +519,31 @@ impl<'a> Display<'a> {
impl core::fmt::Display for Display<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
use Kind as TK;
match TK::from_ty(self.ty) {
TK::Module(idx) => {
use Kind as K;
match K::from_ty(self.ty) {
K::Module(idx) => {
f.write_str("@use(\"")?;
self.files[idx].path.fmt(f)?;
f.write_str(")[")?;
idx.fmt(f)?;
f.write_str("]")
}
TK::Builtin(ty) => f.write_str(to_str(ty)),
TK::Opt(ty) => {
K::Builtin(ty) => f.write_str(to_str(ty)),
K::Opt(ty) => {
f.write_str("?")?;
self.rety(self.tys.ins.opts[ty].base).fmt(f)
}
TK::Ptr(ty) => {
K::Ptr(ty) => {
f.write_str("^")?;
self.rety(self.tys.ins.ptrs[ty].base).fmt(f)
}
TK::Struct(idx) => {
K::Struct(idx) => {
let record = &self.tys.ins.structs[idx];
if record.name.is_null() {
f.write_str("[")?;
idx.fmt(f)?;
f.write_str("]{")?;
for (i, &StructField { name, ty }) in
for (i, &StructField { name, ty, .. }) in
self.tys.struct_fields(idx).iter().enumerate()
{
if i != 0 {
@ -545,13 +559,25 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name))
}
}
TK::Union(idx) => {
K::Tuple(idx) => {
f.write_str(".(")?;
for (i, &ty) in
self.tys.ins.args[self.tys.ins.tuples[idx].fields.range()].iter().enumerate()
{
if i != 0 {
f.write_str(", ")?;
}
self.rety(ty).fmt(f)?;
}
f.write_str(")")
}
K::Union(idx) => {
let record = &self.tys.ins.unions[idx];
if record.name.is_null() {
f.write_str("[")?;
idx.fmt(f)?;
f.write_str("]{")?;
for (i, &StructField { name, ty }) in
for (i, &UnionField { name, ty }) in
self.tys.union_fields(idx).iter().enumerate()
{
if i != 0 {
@ -567,37 +593,36 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name))
}
}
TK::Enum(idx) => {
K::Enum(idx) => {
let enm = &self.tys.ins.enums[idx];
debug_assert!(!enm.name.is_null());
let file = &self.files[enm.file];
f.write_str(file.ident_str(enm.name))
}
TK::Func(idx) => {
K::Func(idx) => {
f.write_str("fn")?;
idx.fmt(f)
}
TK::Template(idx) => {
K::Template(idx) => {
f.write_str("fn")?;
idx.fmt(f)
}
TK::Global(idx) => {
K::Global(idx) => {
let global = &self.tys.ins.globals[idx];
let file = &self.files[global.file];
f.write_str(file.ident_str(global.name))?;
f.write_str(" (global)")
}
TK::Slice(idx) => {
K::Slice(idx) => {
let array = self.tys.ins.slices[idx];
f.write_str("[")?;
self.rety(array.elem).fmt(f)?;
if array.len != ArrayLen::MAX {
f.write_str("; ")?;
array.len.fmt(f)?;
if let Some(len) = array.len() {
len.fmt(f)?;
}
f.write_str("]")
f.write_str("]")?;
self.rety(array.elem).fmt(f)
}
TK::Const(idx) => {
K::Const(idx) => {
let cnst = &self.tys.ins.consts[idx];
let file = &self.files[cnst.file];
f.write_str(file.ident_str(cnst.name))?;
@ -609,9 +634,10 @@ impl core::fmt::Display for Display<'_> {
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub enum SymKey<'a> {
Tuple(List),
Pointer(&'a PtrData),
Optional(&'a OptData),
Type(Id, Pos, Tuple),
Type(Id, Pos, List),
Decl(Id, Ident),
Array(&'a ArrayData),
Constant(&'a ConstData),
@ -619,7 +645,7 @@ pub enum SymKey<'a> {
#[derive(Clone, Copy, Default)]
pub struct Sig {
pub args: Tuple,
pub args: List,
pub ret: Id,
}
@ -713,7 +739,7 @@ pub struct TypeBase {
pub pos: Pos,
pub name: Ident,
pub field_start: u32,
pub captured: Tuple,
pub captured: List,
pub ast: ExprRef,
}
@ -724,6 +750,11 @@ pub struct EnumData {
impl_deref!(EnumData { base: TypeBase });
pub struct UnionField {
pub name: Ident,
pub ty: Id,
}
#[derive(Default)]
pub struct UnionData {
pub base: TypeBase,
@ -736,6 +767,7 @@ impl_deref!(UnionData { base: TypeBase });
pub struct StructField {
pub name: Ident,
pub ty: Id,
pub default_value: Option<Const>,
}
#[derive(Default)]
@ -749,6 +781,13 @@ pub struct StructData {
impl_deref!(StructData { base: TypeBase });
#[derive(Default)]
pub struct TupleData {
pub fields: List,
pub size: Cell<Size>,
pub align: Cell<u8>,
}
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct OptData {
pub base: Id,
@ -818,6 +857,7 @@ impl IdentInterner {
#[derive(Default)]
pub struct TypesTmp {
pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>,
pub args: Vec<Id>,
}
@ -826,6 +866,7 @@ pub struct TypesTmp {
pub struct TypeIns {
pub args: Vec<Id>,
pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>,
pub funcs: EntVec<Func, FuncData>,
pub templates: EntVec<Template, TemplateData>,
@ -837,6 +878,7 @@ pub struct TypeIns {
pub ptrs: EntVec<Ptr, PtrData>,
pub opts: EntVec<Opt, OptData>,
pub slices: EntVec<Slice, ArrayData>,
pub tuples: EntVec<Tuple, TupleData>,
}
pub struct FTask {
@ -880,6 +922,7 @@ impl Types {
| Kind::Builtin(_)
| Kind::Ptr(_)
| Kind::Slice(_)
| Kind::Tuple(_)
| Kind::Opt(_) => utils::is_pascal_case,
Kind::Func(f)
if let &Expr::Closure { ret: &Expr::Ident { id, .. }, .. } =
@ -902,23 +945,23 @@ impl Types {
}
}
pub fn pack_args(&mut self, arg_base: usize) -> Option<Tuple> {
pub fn pack_args(&mut self, arg_base: usize) -> Option<List> {
let base = self.ins.args.len();
self.ins.args.extend(self.tmp.args.drain(arg_base..));
let needle = &self.ins.args[base..];
if needle.is_empty() {
return Some(Tuple::empty());
return Some(List::empty());
}
let len = needle.len();
// FIXME: maybe later when this becomes a bottleneck we use more
// efficient search (SIMD?, indexing?)
let sp = self.ins.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.ins.args.truncate((sp + needle.len()).max(base));
Tuple::new(sp, len)
List::new(sp, len)
}
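Aside: pack_args interns an argument list by appending it to the flat args arena and then searching for an equal window, so identical lists end up sharing storage. A standalone sketch of the same idea with hypothetical names:

fn intern_list(arena: &mut Vec<u32>, items: &[u32]) -> (usize, usize) {
    let base = arena.len();
    arena.extend_from_slice(items);
    let needle = &arena[base..];
    if needle.is_empty() {
        return (0, 0);
    }
    let len = needle.len();
    // Linear scan, as the FIXME above notes; fine until it shows up in profiles.
    let pos = arena.windows(len).position(|w| w == needle).unwrap();
    // If an equal window already existed before `base`, drop the freshly appended copy.
    arena.truncate((pos + len).max(base));
    (pos, len) // start and length of the interned window
}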
pub fn union_fields(&self, union: Union) -> &[StructField] {
&self.ins.struct_fields[self.union_field_range(union)]
pub fn union_fields(&self, union: Union) -> &[UnionField] {
&self.ins.union_fields[self.union_field_range(union)]
}
fn union_field_range(&self, union: Union) -> Range<usize> {
@ -927,7 +970,7 @@ impl Types {
.ins
.unions
.next(union)
.map_or(self.ins.struct_fields.len(), |s| s.field_start as usize);
.map_or(self.ins.union_fields.len(), |s| s.field_start as usize);
start..end
}
@ -935,7 +978,7 @@ impl Types {
&self.ins.struct_fields[self.struct_field_range(strct)]
}
fn struct_field_range(&self, strct: Struct) -> Range<usize> {
pub fn struct_field_range(&self, strct: Struct) -> Range<usize> {
let start = self.ins.structs[strct].field_start as usize;
let end = self
.ins
@ -997,6 +1040,16 @@ impl Types {
self.ins.structs[stru].size.set(oiter.offset);
oiter.offset
}
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].size.get() != 0 {
return self.ins.tuples[tuple].size.get();
}
let mut oiter = OffsetIter::new(tuple, self);
while oiter.next(self).is_some() {}
self.ins.tuples[tuple].size.set(oiter.offset);
oiter.offset
}
Kind::Union(union) => {
if self.ins.unions[union].size.get() != 0 {
return self.ins.unions[union].size.get();
@ -1016,8 +1069,12 @@ impl Types {
self.size_of(base) + self.align_of(base)
}
}
_ if let Some(size) = ty.simple_size() => size,
ty => unimplemented!("size_of: {:?}", ty),
Kind::Ptr(_) | Kind::Builtin(_) => ty.simple_size().unwrap(),
Kind::Func(_)
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
}
}
@ -1049,6 +1106,15 @@ impl Types {
self.ins.structs[stru].align.set(align.try_into().unwrap());
align
}
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].align.get() != 0 {
return self.ins.tuples[tuple].align.get() as _;
}
let align =
self.tuple_fields(tuple).iter().map(|&f| self.align_of(f)).max().unwrap_or(1);
self.ins.tuples[tuple].align.set(align.try_into().unwrap());
align
}
Kind::Slice(arr) => {
let arr = &self.ins.slices[arr];
match arr.len {
@ -1056,7 +1122,14 @@ impl Types {
_ => self.align_of(arr.elem),
}
}
_ => self.size_of(ty).max(1),
Kind::Opt(opt) => self.align_of(self.ins.opts[opt].base),
Kind::Builtin(_) | Kind::Enum(_) | Kind::Ptr(_) => self.size_of(ty),
Kind::Func(_)
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
//_ => self.size_of(ty).max(1),
}
}
@ -1103,7 +1176,7 @@ impl Types {
self.struct_fields(s).iter().position(|f| f.name == name)
}
pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &StructField)> {
pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &UnionField)> {
let name = self.names.project(name)?;
self.union_fields(u).iter().enumerate().find(|(_, f)| f.name == name)
}
@ -1118,10 +1191,14 @@ impl Types {
self.ins.globals.clear();
self.ins.structs.clear();
self.ins.struct_fields.clear();
self.ins.union_fields.clear();
self.ins.enum_fields.clear();
self.ins.ptrs.clear();
self.ins.slices.clear();
debug_assert_eq!(self.tmp.struct_fields.len(), 0);
debug_assert_eq!(self.tmp.union_fields.len(), 0);
debug_assert_eq!(self.tmp.enum_fields.len(), 0);
debug_assert_eq!(self.tmp.args.len(), 0);
debug_assert_eq!(self.tasks.len(), 0);
@ -1140,6 +1217,7 @@ impl Types {
| Kind::Template(_)
| Kind::Global(_)
| Kind::Module(_)
| Kind::Tuple(_)
| Kind::Const(_) => return None,
})
}
@ -1165,7 +1243,11 @@ impl Types {
self.type_base_of(ty).map(|b| b.parent)
}
pub fn captures_of<'a>(&self, ty: Id, file: &'a parser::Ast) -> Option<(&'a [Ident], Tuple)> {
pub fn captures_of<'a>(
&self,
ty: Id,
file: &'a parser::Ast,
) -> Option<(&'a [CapturedIdent], List)> {
let base = self.type_base_of(ty)?;
let (Expr::Struct { captured, .. }
@ -1181,10 +1263,28 @@ impl Types {
pub fn len_of(&self, ty: Id) -> Option<u32> {
Some(match ty.expand() {
Kind::Struct(s) => self.struct_field_range(s).len() as _,
Kind::Tuple(s) => self.ins.tuples[s].fields.len() as _,
Kind::Slice(s) => self.ins.slices[s].len()? as _,
_ => return None,
})
}
pub fn name_of(&self, ty: Id, files: &EntSlice<Module, parser::Ast>, data: &mut Vec<u8>) {
use core::fmt::Write;
let str = unsafe { core::mem::transmute::<&mut Vec<u8>, &mut String>(data) };
write!(str, "{}", Display::new(self, files, ty)).unwrap();
}
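Aside: name_of reuses the caller's byte buffer as a String (via transmute) so the Display impl can write into it without an extra allocation; only valid UTF-8 is written, which keeps String's invariant intact. A safe but allocating equivalent, purely for illustration:

use core::fmt::Write;

fn append_display(data: &mut Vec<u8>, value: impl core::fmt::Display) {
    let mut s = String::new();
    write!(s, "{value}").unwrap();
    data.extend_from_slice(s.as_bytes());
}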
pub fn tuple_fields(&self, tuple: Tuple) -> &[Id] {
&self.ins.args[self.ins.tuples[tuple].fields.range()]
}
pub fn elem_of(&self, ty: Id) -> Option<Id> {
match ty.expand() {
Kind::Slice(s) => Some(self.ins.slices[s].elem),
_ => None,
}
}
}
pub struct OptLayout {
@ -1193,17 +1293,57 @@ pub struct OptLayout {
pub payload_offset: Offset,
}
pub struct OffsetIter {
strct: Struct,
pub trait Agregate: Copy {
type Field: AsRef<Id> + 'static;
fn fields(self, tys: &Types) -> Range<usize>;
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field;
fn align_override(self, _: &Types) -> Option<u8> {
None
}
}
impl Agregate for Tuple {
type Field = Id;
fn fields(self, tys: &Types) -> Range<usize> {
tys.ins.tuples[self].fields.range()
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.args[index]
}
}
impl Agregate for Struct {
type Field = StructField;
fn fields(self, tys: &Types) -> Range<usize> {
tys.struct_field_range(self)
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.struct_fields[index]
}
fn align_override(self, tys: &Types) -> Option<u8> {
tys.ins.structs[self].explicit_alignment
}
}
impl AsRef<Id> for StructField {
fn as_ref(&self) -> &Id {
&self.ty
}
}
pub struct OffsetIter<T> {
strct: T,
offset: Offset,
fields: Range<usize>,
}
impl OffsetIter {
pub fn new(strct: Struct, tys: &Types) -> Self {
Self { strct, offset: 0, fields: tys.struct_field_range(strct) }
}
impl OffsetIter<Struct> {
pub fn offset_of(tys: &Types, idx: Struct, field: &str) -> Option<(Offset, Id)> {
let field_id = tys.names.project(field)?;
OffsetIter::new(idx, tys)
@ -1211,25 +1351,33 @@ impl OffsetIter {
.find(|(f, _)| f.name == field_id)
.map(|(f, off)| (off, f.ty))
}
}
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a StructField, Offset)> {
let stru = &tys.ins.structs[self.strct];
let field = &tys.ins.struct_fields[self.fields.next()?];
impl<T: Agregate> OffsetIter<T> {
pub fn new(strct: T, tys: &Types) -> Self {
Self { strct, offset: 0, fields: strct.fields(tys) }
}
let align = stru.explicit_alignment.map_or_else(|| tys.align_of(field.ty), |a| a as u32);
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a T::Field, Offset)> {
let field = &T::field_by_idx(tys, self.fields.next()?);
let align = self
.strct
.align_override(tys)
.map_or_else(|| tys.align_of(*field.as_ref()), |a| a as u32);
self.offset = (self.offset + align - 1) & !(align - 1);
let off = self.offset;
self.offset += tys.size_of(field.ty);
self.offset += tys.size_of(*field.as_ref());
Some((field, off))
}
pub fn next_ty(&mut self, tys: &Types) -> Option<(Id, Offset)> {
let (field, off) = self.next(tys)?;
Some((field.ty, off))
Some((*field.as_ref(), off))
}
pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&StructField, Offset)> {
pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&T::Field, Offset)> {
core::iter::from_fn(move || self.next(tys))
}
}
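Aside: the arithmetic OffsetIter and size_of rely on is the usual layout walk: round the running offset up to the field's alignment, record it, then advance by the field's size. A self-contained sketch with sizes and alignments supplied directly (no Types involved):

/// Returns every field offset plus the running offset after the last field.
fn field_offsets(fields: &[(u32, u32)]) -> (Vec<u32>, u32) {
    let mut offset = 0u32;
    let mut offsets = Vec::with_capacity(fields.len());
    for &(size, align) in fields {
        debug_assert!(align.is_power_of_two());
        offset = (offset + align - 1) & !(align - 1); // round up to the field's alignment
        offsets.push(offset);
        offset += size;
    }
    (offsets, offset)
}

For fields of (size, align) = (1, 1), (8, 8), (2, 2) this yields offsets 0, 8, 16 and a final offset of 18.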

View file

@ -1,4 +1,3 @@
#![expect(dead_code)]
use {
alloc::alloc,
core::{
@ -7,7 +6,7 @@ use {
hint::unreachable_unchecked,
marker::PhantomData,
mem::MaybeUninit,
ops::{Deref, DerefMut, Not},
ops::{Deref, DerefMut, Not, Range},
ptr::Unique,
},
};
@ -32,9 +31,10 @@ pub fn is_screaming_case(str: &str) -> Result<(), &'static str> {
}
type Nid = u16;
type BitSetUnit = usize;
pub union BitSet {
inline: usize,
inline: BitSetUnit,
alloced: Unique<AllocedBitSet>,
}
@ -78,9 +78,9 @@ impl Default for BitSet {
}
impl BitSet {
const FLAG: usize = 1 << (Self::UNIT - 1);
const FLAG: BitSetUnit = 1 << (Self::UNIT - 1);
const INLINE_ELEMS: usize = Self::UNIT - 1;
const UNIT: usize = core::mem::size_of::<usize>() * 8;
pub const UNIT: usize = core::mem::size_of::<BitSetUnit>() * 8;
pub fn with_capacity(len: usize) -> Self {
let mut s = Self::default();
@ -92,7 +92,7 @@ impl BitSet {
unsafe { self.inline & Self::FLAG != 0 }
}
fn data_and_len(&self) -> (&[usize], usize) {
fn data_and_len(&self) -> (&[BitSetUnit], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
@ -100,16 +100,16 @@ impl BitSet {
let small_vec = self.alloced.as_ref();
(
core::slice::from_raw_parts(
&small_vec.data as *const _ as *const usize,
&small_vec.data as *const _ as *const BitSetUnit,
small_vec.cap,
),
small_vec.cap * core::mem::size_of::<usize>() * 8,
small_vec.cap * Self::UNIT,
)
}
}
}
fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
fn data_mut_and_len(&mut self) -> (&mut [BitSetUnit], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_mut(&mut self.inline), INLINE_ELEMS)
@ -117,7 +117,7 @@ impl BitSet {
let small_vec = self.alloced.as_mut();
(
core::slice::from_raw_parts_mut(
&mut small_vec.data as *mut _ as *mut usize,
&mut small_vec.data as *mut _ as *mut BitSetUnit,
small_vec.cap,
),
small_vec.cap * Self::UNIT,
@ -163,7 +163,7 @@ impl BitSet {
let (ptr, prev_len) = unsafe {
if self.is_inline() {
let ptr = alloc::alloc(layout);
*ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
*ptr.add(off).cast::<BitSetUnit>() = self.inline & !Self::FLAG;
(ptr, 1)
} else {
let prev_len = self.alloced.as_ref().cap;
@ -174,7 +174,7 @@ impl BitSet {
unsafe {
MaybeUninit::fill(
core::slice::from_raw_parts_mut(
ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
ptr.add(off).cast::<MaybeUninit<BitSetUnit>>().add(prev_len),
slot_count - prev_len,
),
0,
@ -187,7 +187,7 @@ impl BitSet {
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
unsafe {
core::alloc::Layout::new::<AllocedBitSet>()
.extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
.extend(Layout::array::<BitSetUnit>(slot_count).unwrap_unchecked())
.unwrap_unchecked()
}
}
@ -205,6 +205,10 @@ impl BitSet {
pub fn clear(&mut self, len: usize) {
self.reserve(len);
self.clear_as_is();
}
pub fn clear_as_is(&mut self) {
if self.is_inline() {
unsafe { self.inline &= Self::FLAG };
} else {
@ -212,7 +216,11 @@ impl BitSet {
}
}
pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
pub fn approx_unit_cap(&self) -> usize {
self.data_and_len().0.len()
}
pub fn units<'a>(&'a self, slot: &'a mut BitSetUnit) -> &'a [BitSetUnit] {
if self.is_inline() {
*slot = unsafe { self.inline } & !Self::FLAG;
core::slice::from_ref(slot)
@ -221,36 +229,47 @@ impl BitSet {
}
}
pub fn units_mut(&mut self) -> Option<&mut [BitSetUnit]> {
self.is_inline().not().then(|| self.data_mut_and_len().0)
}
pub fn reserve(&mut self, len: usize) {
if len > self.data_and_len().1 {
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
}
}
pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
if self.is_inline() {
Err(unsafe {
core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
})
pub fn set_range(&mut self, proj_range: Range<usize>) {
if proj_range.is_empty() {
return;
}
self.reserve(proj_range.end);
let (units, _) = self.data_mut_and_len();
if proj_range.start / Self::UNIT == (proj_range.end - 1) / Self::UNIT {
debug_assert!(proj_range.len() <= Self::UNIT);
let mask = ((1 << proj_range.len()) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= mask;
} else {
Ok(self.data_mut_and_len().0)
}
}
}
let fill_range = proj_range.start.div_ceil(Self::UNIT)..proj_range.end / Self::UNIT;
units[fill_range].fill(BitSetUnit::MAX);
pub struct InlineBitSetView(usize);
let prefix_len = Self::UNIT - proj_range.start % Self::UNIT;
let prefix_mask = ((1 << prefix_len) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= prefix_mask;
impl InlineBitSetView {
pub(crate) fn add_mask(&mut self, tmp: usize) {
debug_assert!(tmp & BitSet::FLAG == 0);
self.0 |= tmp;
let postfix_len = proj_range.end % Self::UNIT;
let postfix_mask = (1 << postfix_len) - 1;
units[proj_range.end / Self::UNIT] |= postfix_mask;
}
}
}
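Aside: set_range above splits the range into a partially covered first unit, fully covered middle units (bulk-filled), and a partially covered last unit. The same effect, restated as one loop over overlapped units (usize units, as in the diff):

fn set_range(units: &mut [usize], range: core::ops::Range<usize>) {
    const UNIT: usize = usize::BITS as usize;
    let mut start = range.start;
    while start < range.end {
        let unit = start / UNIT;
        let lo = start % UNIT;
        let hi = (range.end - unit * UNIT).min(UNIT);
        // OR-mask covering bits lo..hi of this unit.
        let mask = if hi - lo == UNIT { usize::MAX } else { ((1usize << (hi - lo)) - 1) << lo };
        units[unit] |= mask;
        start = (unit + 1) * UNIT;
    }
}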
pub struct BitSetIter<'a> {
index: usize,
current: usize,
remining: &'a [usize],
current: BitSetUnit,
remining: &'a [BitSetUnit],
}
impl Iterator for BitSetIter<'_> {
@ -270,7 +289,7 @@ impl Iterator for BitSetIter<'_> {
struct AllocedBitSet {
cap: usize,
data: [usize; 0],
data: [BitSetUnit; 0],
}
#[cfg(test)]

View file

@ -5,8 +5,8 @@ main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
JAL r31, r0, :cond
CP r33, r1
CP r32, r0
CP r33, r1
JNE r33, r32, :0
JMP :1
0: LI64 r32, 2d

View file

@ -1,5 +1,11 @@
main:
fun:
UN
code size: 9
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :fun
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
code size: 64
ret: 0
status: Err(Unreachable)

View file

@ -48,10 +48,9 @@ main:
JMP :1
6: CP r1, r0
JMP :1
5: ADDI64 r34, r32, 16d
ST r0, r32, 0a, 8h
5: ST r0, r32, 0a, 8h
ST r0, r32, 8a, 8h
CP r32, r34
ADDI64 r32, r32, 16d
JMP :7
3: JAL r31, r0, :new_stru
ST r1, r32, 0a, 16h
@ -67,6 +66,6 @@ new_stru:
LD r1, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 658
code size: 655
ret: 0
status: Ok(())

View file

@ -1,10 +1,10 @@
continue_and_state_change:
CP r13, r2
CP r15, r0
LI64 r16, 3d
LI64 r14, 4d
LI64 r17, 2d
LI64 r18, 10d
CP r15, r0
LI64 r14, 4d
6: JLTU r13, r18, :0
JMP :1
0: JNE r13, r17, :2
@ -37,41 +37,41 @@ main:
ST r31, r254, 0a, 40h
CP r2, r0
JAL r31, r0, :multiple_breaks
CP r32, r1
LI64 r33, 3d
JEQ r32, r33, :0
LI64 r32, 3d
CP r33, r1
JEQ r33, r32, :0
LI64 r32, 1d
CP r1, r32
JMP :1
0: LI64 r32, 4d
CP r2, r32
0: LI64 r33, 4d
CP r2, r33
JAL r31, r0, :multiple_breaks
CP r34, r1
LI64 r35, 10d
JEQ r34, r35, :2
LI64 r34, 10d
CP r35, r1
JEQ r35, r34, :2
LI64 r32, 2d
CP r1, r32
JMP :1
2: CP r2, r0
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r0, :3
CP r1, r33
JMP :1
3: CP r2, r32
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r35, :4
CP r35, r1
JEQ r35, r0, :3
CP r1, r32
JMP :1
4: CP r2, r35
3: CP r2, r33
JAL r31, r0, :state_change_in_break
CP r35, r1
JEQ r35, r34, :4
CP r1, r33
JMP :1
4: CP r2, r34
JAL r31, r0, :continue_and_state_change
CP r32, r1
JEQ r32, r35, :5
CP r33, r1
JEQ r33, r34, :5
LI64 r32, 5d
CP r1, r32
JMP :1
5: CP r2, r33
5: CP r2, r32
JAL r31, r0, :continue_and_state_change
CP r32, r1
JEQ r32, r0, :6

View file

@ -42,10 +42,11 @@ free:
CP r4, r14
CP r5, r15
ECA
CP r13, r1
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -96d
ST r31, r254, 48a, 48h
ADDI64 r254, r254, -88d
ST r31, r254, 48a, 40h
ADDI64 r32, r254, 24d
CP r1, r32
JAL r31, r0, :new
@ -60,20 +61,19 @@ main:
CP r2, r33
CP r3, r34
JAL r31, r0, :push
CP r34, r1
LD r35, r254, 0a, 8h
LD r35, r35, 0a, 1h
LD r36, r254, 24a, 8h
LD r34, r36, 0a, 8h
LD r34, r254, 0a, 8h
LD r34, r34, 0a, 1h
LD r35, r254, 24a, 8h
LD r35, r35, 0a, 8h
CP r2, r33
JAL r31, r0, :deinit
CP r2, r32
JAL r31, r0, :deinit
ANDI r32, r35, 255d
ADD64 r32, r34, r32
ANDI r32, r34, 255d
ADD64 r32, r35, r32
CP r1, r32
LD r31, r254, 48a, 48h
ADDI64 r254, r254, 96d
LD r31, r254, 48a, 40h
ADDI64 r254, r254, 88d
JALA r0, r31, 0a
malloc:
CP r13, r2
@ -112,51 +112,49 @@ new:
push:
ADDI64 r254, r254, -80d
ST r31, r254, 0a, 80h
CP r38, r2
CP r39, r3
LI64 r37, 1d
LD r33, r38, 8a, 8h
LD r32, r38, 16a, 8h
CP r36, r2
CP r37, r3
LI64 r35, 1d
LD r33, r36, 8a, 8h
LD r32, r36, 16a, 8h
JNE r32, r33, :0
JNE r32, r0, :1
CP r32, r37
CP r32, r35
JMP :2
1: MULI64 r32, r32, 2d
2: CP r2, r32
CP r3, r37
CP r3, r35
JAL r31, r0, :malloc
CP r35, r1
ST r32, r38, 16a, 8h
JNE r35, r0, :3
ST r32, r36, 16a, 8h
CP r34, r1
JNE r34, r0, :3
CP r1, r0
JMP :4
3: LD r32, r38, 0a, 8h
ADD64 r40, r33, r32
CP r34, r35
7: LD r33, r38, 0a, 8h
LD r36, r38, 8a, 8h
JNE r40, r32, :5
JEQ r36, r0, :6
CP r2, r33
CP r3, r36
CP r4, r37
3: LD r32, r36, 0a, 8h
ADD64 r38, r33, r32
CP r33, r34
7: LD r39, r36, 0a, 8h
LD r40, r36, 8a, 8h
JNE r38, r32, :5
JEQ r40, r0, :6
CP r2, r39
CP r3, r40
CP r4, r35
JAL r31, r0, :free
JMP :6
6: ST r35, r38, 0a, 8h
6: ST r34, r36, 0a, 8h
JMP :0
5: ADDI64 r36, r34, 1d
ADDI64 r33, r32, 1d
LD r32, r32, 0a, 1h
ST r32, r34, 0a, 1h
CP r32, r33
CP r34, r36
JMP :7
0: LD r32, r38, 8a, 8h
LD r33, r38, 0a, 8h
ADD64 r33, r32, r33
5: LD r39, r32, 0a, 1h
ST r39, r33, 0a, 1h
ADD64 r32, r32, r37
ST r32, r38, 8a, 8h
ADDI64 r33, r33, 1d
ADDI64 r32, r32, 1d
JMP :7
0: LD r32, r36, 8a, 8h
LD r33, r36, 0a, 8h
ADD64 r33, r32, r33
ST r37, r33, 0a, 1h
ADD64 r32, r32, r35
ST r32, r36, 8a, 8h
CP r1, r33
4: LD r31, r254, 0a, 80h
ADDI64 r254, r254, 80d
@ -164,60 +162,58 @@ push:
push:
ADDI64 r254, r254, -88d
ST r31, r254, 0a, 88h
CP r38, r2
CP r39, r3
LI64 r37, 1d
LD r33, r38, 8a, 8h
LD r32, r38, 16a, 8h
CP r36, r2
CP r37, r3
LI64 r35, 1d
LD r33, r36, 8a, 8h
LD r32, r36, 16a, 8h
JNE r32, r33, :0
JNE r32, r0, :1
CP r32, r37
CP r32, r35
JMP :2
1: MULI64 r32, r32, 2d
2: LI64 r40, 8d
MUL64 r34, r32, r40
2: LI64 r38, 8d
MUL64 r34, r32, r38
CP r2, r34
CP r3, r40
CP r3, r38
JAL r31, r0, :malloc
CP r35, r1
ST r32, r38, 16a, 8h
JNE r35, r0, :3
ST r32, r36, 16a, 8h
CP r34, r1
JNE r34, r0, :3
CP r1, r0
JMP :4
3: MULI64 r33, r33, 8d
LD r32, r38, 0a, 8h
ADD64 r41, r32, r33
CP r34, r35
7: LD r33, r38, 0a, 8h
LD r36, r38, 8a, 8h
JNE r41, r32, :5
JEQ r36, r0, :6
MUL64 r32, r36, r40
CP r2, r33
LD r32, r36, 0a, 8h
ADD64 r39, r32, r33
CP r33, r34
7: LD r40, r36, 0a, 8h
LD r41, r36, 8a, 8h
JNE r39, r32, :5
JEQ r41, r0, :6
MUL64 r32, r41, r38
CP r2, r40
CP r3, r32
CP r4, r40
CP r4, r38
JAL r31, r0, :free
JMP :6
6: ST r35, r38, 0a, 8h
6: ST r34, r36, 0a, 8h
JMP :0
5: ADDI64 r36, r34, 8d
ADDI64 r33, r32, 8d
LD r32, r32, 0a, 8h
ST r32, r34, 0a, 8h
CP r32, r33
CP r34, r36
5: LD r40, r32, 0a, 8h
ST r40, r33, 0a, 8h
ADDI64 r33, r33, 8d
ADDI64 r32, r32, 8d
JMP :7
0: LD r32, r38, 8a, 8h
0: LD r32, r36, 8a, 8h
MULI64 r33, r32, 8d
LD r34, r38, 0a, 8h
LD r34, r36, 0a, 8h
ADD64 r33, r34, r33
ST r39, r33, 0a, 8h
ADD64 r32, r32, r37
ST r32, r38, 8a, 8h
ST r37, r33, 0a, 8h
ADD64 r32, r32, r35
ST r32, r36, 8a, 8h
CP r1, r33
4: LD r31, r254, 0a, 88h
ADDI64 r254, r254, 88d
JALA r0, r31, 0a
code size: 1635
code size: 1623
ret: 69
status: Ok(())

View file

@ -2,8 +2,8 @@ inb:
CP r1, r0
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LRA r32, r0, :ports
LD r33, r32, 0a, 1h
ANDI r33, r33, 255d
@ -11,12 +11,12 @@ main:
JMP :1
0: JAL r31, r0, :inb
CP r33, r1
CMPU r34, r33, r0
CMPUI r34, r34, 0d
NOT r34, r34
ST r34, r32, 0a, 1h
1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
CMPU r33, r33, r0
CMPUI r33, r33, 0d
NOT r33, r33
ST r33, r32, 0a, 1h
1: LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 164
ret: 0

View file

@ -1,21 +1,20 @@
main:
ADDI64 r254, r254, -128d
ADDI64 r15, r254, 0d
LI8 r16, 69b
LI64 r17, 128d
ADDI64 r14, r254, 0d
LI8 r15, 69b
LI64 r16, 128d
CP r13, r0
2: LD r14, r254, 42a, 1h
JLTU r13, r17, :0
ANDI r13, r14, 255d
2: LD r17, r254, 42a, 1h
JLTU r13, r16, :0
ANDI r13, r17, 255d
CP r1, r13
JMP :1
0: ADDI64 r14, r13, 1d
ADD64 r13, r15, r13
ST r16, r13, 0a, 1h
CP r13, r14
0: ADD64 r17, r14, r13
ST r15, r17, 0a, 1h
ADDI64 r13, r13, 1d
JMP :2
1: ADDI64 r254, r254, 128d
JALA r0, r31, 0a
code size: 141
code size: 138
ret: 69
status: Ok(())

View file

@ -59,9 +59,9 @@ put_filled_rect:
LD r14, r14, 0a, 8h
ADD64 r26, r14, r26
LD r28, r15, 0a, 8h
MUL64 r15, r27, r25
ADD64 r14, r14, r15
ADD64 r15, r28, r26
MUL64 r25, r27, r25
ADD64 r14, r14, r25
ADD64 r14, r28, r14
3: JGTU r13, r20, :0
JNE r13, r20, :1

View file

@ -6,9 +6,9 @@ integer_range:
CP r2, r16
CP r3, r15
ECA
CP r15, r1
SUB64 r14, r14, r13
ADDI64 r14, r14, 1d
CP r15, r1
DIRU64 r0, r14, r15, r14
ADD64 r13, r14, r13
CP r1, r13

View file

@ -10,26 +10,26 @@ decide:
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -120d
ST r31, r254, 72a, 48h
ADDI64 r254, r254, -104d
ST r31, r254, 72a, 32h
ADDI64 r32, r254, 48d
CP r1, r32
CP r2, r0
JAL r31, r0, :decide
ADDI64 r34, r254, 24d
BMC r32, r34, 24h
LI64 r35, 1d
CP r1, r34
CP r2, r35
ADDI64 r33, r254, 24d
BMC r32, r33, 24h
LI64 r34, 1d
CP r1, r33
CP r2, r34
JAL r31, r0, :decide
ADDI64 r36, r254, 0d
BMC r32, r36, 24h
ADDI64 r34, r254, 0d
BMC r32, r34, 24h
LD r32, r254, 24a, 8h
LD r33, r254, 0a, 8h
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 72a, 48h
ADDI64 r254, r254, 120d
LD r31, r254, 72a, 32h
ADDI64 r254, r254, 104d
JALA r0, r31, 0a
code size: 273
ret: 1

View file

@ -3,15 +3,15 @@ main:
ST r31, r254, 32a, 40h
LRA r32, r0, :"Goodbye, World!\0"
LRA r33, r0, :"Hello, World!\0"
ST r32, r254, 8a, 8h
ST r32, r254, 16a, 8h
ST r33, r254, 24a, 8h
LD r2, r254, 24a, 8h
LD r3, r254, 8a, 8h
LD r3, r254, 16a, 8h
JAL r31, r0, :print
ADDI64 r34, r254, 0d
ADDI64 r35, r254, 16d
ST r32, r254, 0a, 8h
ST r33, r254, 16a, 8h
ADDI64 r34, r254, 8d
ADDI64 r35, r254, 0d
ST r32, r254, 8a, 8h
ST r33, r254, 0a, 8h
CP r2, r35
CP r3, r34
JAL r31, r0, :print2

View file

@ -1,6 +1,6 @@
main:
ADDI64 r254, r254, -66d
ST r31, r254, 26a, 40h
ADDI64 r254, r254, -58d
ST r31, r254, 26a, 32h
JAL r31, r0, :returner_fn
CP r32, r1
ADDI64 r33, r254, 2d
@ -25,8 +25,8 @@ main:
JMP :1
0: LI64 r32, 1d
CP r1, r32
1: LD r31, r254, 26a, 40h
ADDI64 r254, r254, 66d
1: LD r31, r254, 26a, 32h
ADDI64 r254, r254, 58d
JALA r0, r31, 0a
returner_bn:
ADDI64 r254, r254, -24d

View file

@ -3,19 +3,19 @@ decide:
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -128d
ST r31, r254, 80a, 48h
ADDI64 r254, r254, -144d
ST r31, r254, 96a, 48h
JAL r31, r0, :decide
CP r32, r1
CP r33, r0
ADDI64 r34, r254, 72d
ADDI64 r34, r254, 88d
CP r32, r1
ANDI r32, r32, 255d
JNE r32, r0, :0
CP r32, r33
JMP :1
0: CP r32, r34
1: LI64 r35, 1d
ST r35, r254, 72a, 8h
ST r35, r254, 88a, 8h
JNE r32, r33, :2
LI64 r32, 9001d
CP r1, r32
@ -25,20 +25,20 @@ main:
ANDI r33, r33, 255d
JNE r33, r0, :4
LI8 r33, 1b
ST r33, r254, 56a, 1h
ST r33, r254, 72a, 1h
LD r32, r32, 0a, 8h
ST r32, r254, 64a, 8h
ST r32, r254, 80a, 8h
JMP :5
4: ST r0, r254, 56a, 1h
5: LD r32, r254, 56a, 1h
4: ST r0, r254, 72a, 1h
5: LD r32, r254, 72a, 1h
ANDI r32, r32, 255d
JEQ r32, r0, :6
LI64 r32, 42d
CP r1, r32
JMP :3
6: JAL r31, r0, :decide
CP r32, r1
CP r33, r0
CP r32, r1
ANDI r32, r32, 255d
JNE r32, r0, :7
CP r32, r33
@ -50,17 +50,20 @@ main:
LI64 r32, 69d
CP r1, r32
JMP :3
9: ADDI64 r33, r254, 40d
9: ADDI64 r33, r254, 56d
JAL r31, r0, :new_foo
ST r1, r33, 0a, 16h
LD r36, r254, 40a, 8h
LD r36, r254, 56a, 8h
JNE r36, r0, :10
LI64 r32, 999d
CP r1, r32
JMP :3
10: LRA r36, r0, :"foo\0"
ST r36, r254, 40a, 8h
LI64 r36, 4d
ST r36, r254, 48a, 8h
LD r2, r33, 0a, 16h
CP r4, r36
LD r4, r254, 40a, 16h
JAL r31, r0, :use_foo
ADDI64 r33, r254, 0d
JAL r31, r0, :no_foo
@ -98,8 +101,8 @@ main:
ANDI r32, r32, 65535d
SUB64 r32, r32, r33
CP r1, r32
3: LD r31, r254, 80a, 48h
ADDI64 r254, r254, 128d
3: LD r31, r254, 96a, 48h
ADDI64 r254, r254, 144d
JALA r0, r31, 0a
new_bar:
ADDI64 r254, r254, -24d
@ -129,11 +132,13 @@ no_foo:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
use_foo:
ADDI64 r254, r254, -16d
ST r2, r254, 0a, 16h
ADDI64 r2, r254, 0d
ADDI64 r254, r254, 16d
ADDI64 r254, r254, -32d
ST r2, r254, 16a, 16h
ADDI64 r2, r254, 16d
ST r4, r254, 0a, 16h
ADDI64 r4, r254, 0d
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 1092
code size: 1162
ret: 0
status: Ok(())

View file

@ -1,6 +1,6 @@
main:
ADDI64 r254, r254, -64d
ST r31, r254, 24a, 40h
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
ADDI64 r32, r254, 0d
LI64 r33, 1d
ST r33, r254, 16a, 8h
@ -9,14 +9,14 @@ main:
ST r33, r254, 8a, 8h
JAL r31, r0, :opaque
ST r1, r32, 0a, 16h
LD r34, r254, 8a, 8h
LD r35, r254, 16a, 8h
ADD64 r34, r35, r34
LD r33, r254, 8a, 8h
LD r34, r254, 16a, 8h
ADD64 r33, r34, r33
LD r32, r254, 0a, 8h
SUB64 r32, r32, r34
SUB64 r32, r32, r33
CP r1, r32
LD r31, r254, 24a, 40h
ADDI64 r254, r254, 64d
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
opaque:
ADDI64 r254, r254, -16d

View file

@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,23 +1,23 @@
main:
ADDI64 r254, r254, -52d
ST r31, r254, 4a, 48h
ADDI64 r254, r254, -44d
ST r31, r254, 4a, 40h
ADDI64 r32, r254, 0d
JAL r31, r0, :random_color
ST r1, r32, 0a, 4h
LD r34, r254, 0a, 1h
LD r35, r254, 1a, 1h
LD r36, r254, 2a, 1h
LD r33, r254, 0a, 1h
LD r34, r254, 1a, 1h
LD r35, r254, 2a, 1h
ANDI r33, r33, 255d
ANDI r34, r34, 255d
ANDI r35, r35, 255d
LD r32, r254, 3a, 1h
ANDI r33, r36, 255d
ADD64 r34, r35, r34
ANDI r32, r32, 255d
ANDI r35, r35, 255d
ADD64 r33, r34, r33
ANDI r32, r32, 255d
ADD64 r33, r33, r35
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 4a, 48h
ADDI64 r254, r254, 52d
LD r31, r254, 4a, 40h
ADDI64 r254, r254, 44d
JALA r0, r31, 0a
random_color:
LRA r13, r0, :white

View file

@ -0,0 +1,8 @@
main:
LRA r13, r0, :a
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 50
ret: 0
status: Ok(())

View file

@ -0,0 +1,27 @@
main:
ADDI64 r254, r254, -40d
ST r0, r254, 0a, 8h
LI64 r13, 1d
ST r13, r254, 8a, 8h
LI64 r13, 2d
ST r13, r254, 16a, 8h
LI64 r13, 3d
LI64 r14, 10d
ST r13, r254, 24a, 8h
ST r14, r254, 32a, 8h
LD r13, r254, 0a, 8h
LD r14, r254, 8a, 8h
ADD64 r13, r14, r13
LD r14, r254, 16a, 8h
ADD64 r13, r14, r13
LD r14, r254, 24a, 8h
ADD64 r13, r14, r13
LD r14, r254, 32a, 8h
ADDI64 r13, r13, 4d
SUB64 r13, r13, r14
CP r1, r13
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 241
ret: 0
status: Ok(())

View file

@ -11,9 +11,9 @@ main:
JALA r0, r31, 0a
sqrt:
CP r14, r2
CP r17, r0
LI64 r16, 15d
LI64 r15, 32768d
CP r17, r0
CP r13, r17
3: JNE r15, r17, :0
CP r1, r13

View file

@ -5,8 +5,8 @@ do_stuff:
just_read:
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -80d
ST r31, r254, 48a, 32h
ADDI64 r254, r254, -72d
ST r31, r254, 48a, 24h
ADDI64 r32, r254, 16d
CP r1, r32
JAL r31, r0, :optionala
@ -37,8 +37,8 @@ main:
CP r33, r1
ADD64 r32, r33, r32
CP r1, r32
1: LD r31, r254, 48a, 32h
ADDI64 r254, r254, 80d
1: LD r31, r254, 48a, 24h
ADDI64 r254, r254, 72d
JALA r0, r31, 0a
optional:
ADDI64 r254, r254, -16d

View file

@ -0,0 +1,9 @@
main:
LRA r13, r0, :"abcdefshijklmnop\0"
LD r13, r13, 0a, 1h
ANDI r13, r13, 255d
CP r1, r13
JALA r0, r31, 0a
code size: 70
ret: 97
status: Ok(())

View file

@ -1,45 +1,44 @@
main:
ADDI64 r254, r254, -40d
LI64 r17, 1d
LI64 r16, 1d
LI64 r15, 4d
ADDI64 r17, r254, 0d
CP r14, r0
ADDI64 r18, r254, 0d
CP r13, r14
6: JNE r13, r15, :0
ADDI64 r19, r254, 32d
LI64 r20, 2d
ADDI64 r18, r254, 32d
LI64 r19, 2d
CP r13, r14
4: LD r15, r254, 16a, 8h
JNE r13, r17, :1
JNE r13, r16, :1
CP r1, r15
JMP :2
1: ADD64 r16, r13, r17
SUB64 r15, r20, r16
MUL64 r21, r15, r20
MUL64 r22, r13, r20
1: ADD64 r15, r13, r16
SUB64 r20, r19, r15
MUL64 r20, r20, r19
MUL64 r21, r13, r19
CP r13, r14
5: JNE r13, r20, :3
CP r13, r16
JMP :4
3: ADD64 r15, r13, r17
ADD64 r23, r22, r13
ADD64 r13, r21, r13
MULI64 r23, r23, 8d
MULI64 r13, r13, 8d
ADD64 r23, r18, r23
ADD64 r13, r18, r13
BMC r23, r19, 8h
BMC r13, r23, 8h
BMC r19, r13, 8h
5: JNE r13, r19, :3
CP r13, r15
JMP :4
3: ADD64 r22, r21, r13
ADD64 r23, r20, r13
MULI64 r22, r22, 8d
MULI64 r23, r23, 8d
ADD64 r22, r17, r22
ADD64 r23, r17, r23
BMC r22, r18, 8h
BMC r23, r22, 8h
BMC r18, r23, 8h
ADD64 r13, r13, r16
JMP :5
0: MULI64 r16, r13, 8d
ADD64 r16, r18, r16
ST r13, r16, 0a, 8h
ADD64 r13, r13, r17
0: MULI64 r18, r13, 8d
ADD64 r18, r17, r18
ST r13, r18, 0a, 8h
ADD64 r13, r13, r16
JMP :6
2: ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 267
code size: 264
ret: 0
status: Ok(())

View file

@ -9,8 +9,8 @@ foo:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -88d
ST r31, r254, 48a, 40h
ADDI64 r254, r254, -80d
ST r31, r254, 48a, 32h
ADDI64 r32, r254, 32d
JAL r31, r0, :foo
ST r1, r32, 0a, 16h
@ -30,8 +30,8 @@ main:
LI64 r33, 7d
SUB64 r32, r33, r32
CP r1, r32
LD r31, r254, 48a, 40h
ADDI64 r254, r254, 88d
LD r31, r254, 48a, 32h
ADDI64 r254, r254, 80d
JALA r0, r31, 0a
code size: 347
ret: 0

View file

@ -0,0 +1,12 @@
main:
LRA r13, r0, :"abcd\0"
ADDI64 r13, r13, 1d
LI64 r14, 37d
CP r2, r14
CP r3, r13
ECA
JALA r0, r31, 0a
bcd
code size: 59
ret: 0
status: Ok(())

View file

@ -1,28 +1,26 @@
main:
ADDI64 r254, r254, -10240d
LI8 r15, 64b
LI64 r16, 1024d
LI8 r14, 64b
LI64 r15, 1024d
ADDI64 r16, r254, 0d
CP r13, r0
ADDI64 r17, r254, 0d
4: JLTU r13, r16, :0
ADDI64 r13, r17, 1024d
ADDI64 r15, r17, 10240d
3: LD r14, r254, 2048a, 1h
JLTU r13, r15, :1
ANDI r13, r14, 255d
4: JLTU r13, r15, :0
ADDI64 r14, r16, 10240d
ADDI64 r13, r16, 1024d
3: LD r15, r254, 2048a, 1h
JLTU r13, r14, :1
ANDI r13, r15, 255d
CP r1, r13
JMP :2
1: ADDI64 r14, r13, 1024d
BMC r17, r13, 1024h
CP r13, r14
1: BMC r16, r13, 1024h
ADDI64 r13, r13, 1024d
JMP :3
0: ADDI64 r14, r13, 1d
ADD64 r13, r17, r13
ST r15, r13, 0a, 1h
CP r13, r14
0: ADD64 r17, r16, r13
ST r14, r17, 0a, 1h
ADDI64 r13, r13, 1d
JMP :4
2: ADDI64 r254, r254, 10240d
JALA r0, r31, 0a
code size: 192
code size: 186
ret: 64
status: Ok(())

View file

@ -1,10 +1,10 @@
main:
ADDI64 r254, r254, -64d
ST r31, r254, 0a, 64h
CP r34, r0
LI64 r37, 65536d
LI8 r35, 1b
CP r36, r0
CP r34, r0
LI8 r35, 1b
CP r32, r36
7: JAL r31, r0, :opaque
CP r33, r1

View file

@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,46 +1,46 @@
main:
ADDI64 r254, r254, -48d
ST r31, r254, 16a, 32h
ADDI64 r254, r254, -40d
ST r31, r254, 16a, 24h
ADDI64 r32, r254, 0d
CP r3, r0
CP r4, r0
JAL r31, r0, :maina
ST r1, r32, 0a, 16h
LD r34, r254, 12a, 1h
LD r33, r254, 12a, 1h
LD r32, r254, 3a, 1h
SUB8 r32, r32, r34
SUB8 r32, r32, r33
ANDI r32, r32, 255d
CP r1, r32
LD r31, r254, 16a, 32h
ADDI64 r254, r254, 48d
LD r31, r254, 16a, 24h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
maina:
ADDI64 r254, r254, -52d
ST r31, r254, 20a, 32h
ADDI64 r254, r254, -44d
ST r31, r254, 20a, 24h
ADDI64 r32, r254, 16d
JAL r31, r0, :small_struct
ST r1, r32, 0a, 4h
ST r0, r254, 0a, 1h
ST r0, r254, 1a, 1h
ST r0, r254, 2a, 1h
LI8 r33, 3b
ST r33, r254, 3a, 1h
LI8 r34, 1b
ST r34, r254, 4a, 1h
LI8 r32, 3b
ST r32, r254, 3a, 1h
LI8 r33, 1b
ST r33, r254, 4a, 1h
ST r0, r254, 5a, 1h
ST r0, r254, 6a, 1h
ST r0, r254, 7a, 1h
ST r0, r254, 8a, 1h
ST r0, r254, 9a, 1h
ST r0, r254, 10a, 1h
ST r33, r254, 11a, 1h
ST r34, r254, 12a, 1h
ST r32, r254, 11a, 1h
ST r33, r254, 12a, 1h
ST r0, r254, 13a, 1h
ST r0, r254, 14a, 1h
ST r0, r254, 15a, 1h
LD r1, r254, 0a, 16h
LD r31, r254, 20a, 32h
ADDI64 r254, r254, 52d
LD r31, r254, 20a, 24h
ADDI64 r254, r254, 44d
JALA r0, r31, 0a
small_struct:
ADDI64 r254, r254, -4d

smh.hb (new empty file)
View file

View file

@ -3,10 +3,12 @@ name = "hbvm"
version = "0.1.0"
edition = "2021"
[dependencies]
hbbytecode = { workspace = true }
[features]
default = ["alloc"]
disasm = ["hbbytecode/disasm", "alloc"]
alloc = []
nightly = []
[dependencies]
hbbytecode = { workspace = true }

View file

@ -4,7 +4,7 @@ pub mod softpaging;
pub(crate) mod addr;
use crate::utils::impl_display;
use crate::{utils::impl_display, value::Value};
pub use addr::Address;
/// Load-store memory access
@ -36,6 +36,50 @@ pub trait Memory {
/// # Safety
/// - Data read have to be valid
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: Address) -> T;
/// Log instruction to be executed
fn log_instr(&mut self, _at: Address, _regs: &[Value]) {}
}
#[cfg(feature = "alloc")]
#[derive(Default)]
pub struct InstrLogger {
#[cfg(debug_assertions)]
op_buf: alloc::vec::Vec<hbbytecode::Oper>,
#[cfg(debug_assertions)]
disp_buf: alloc::string::String,
}
#[cfg(feature = "alloc")]
impl InstrLogger {
/// # Safety
/// - `addr` needs to point to a valid instruction
#[cfg(debug_assertions)]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
let instr = hbbytecode::Instr::try_from(unsafe { *(addr.get() as *const u8) }).unwrap();
let mut bytes =
unsafe { core::slice::from_raw_parts(addr.get() as *const u8, instr.size()) };
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", alloc::format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
&self.disp_buf
}
#[cfg(not(debug_assertions))]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
""
}
}
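Aside: the new log_instr hook is a defaulted trait method, so existing Memory implementations keep compiling unchanged and only debugging ones override it. A toy, self-contained sketch of the pattern (toy types, not hbvm's):

trait Memory {
    /// Called before each instruction executes; the default is a no-op.
    fn log_instr(&mut self, _at: u64, _regs: &[u64]) {}
}

struct Silent;
impl Memory for Silent {}

struct Tracing;
impl Memory for Tracing {
    fn log_instr(&mut self, at: u64, regs: &[u64]) {
        println!("pc={at:#x} r1={}", regs.get(1).copied().unwrap_or(0));
    }
}

fn step(mem: &mut impl Memory, pc: u64, regs: &[u64]) {
    mem.log_instr(pc, regs); // mirrors the vmrun change shown below
    // ...decode and execute the instruction at `pc`...
}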
/// Unhandled load access trap

View file

@ -55,6 +55,7 @@ where
// - Yes, we assume you run 64 bit CPU. Else ?conradluget a better CPU
// sorry 8 bit fans, HBVM won't run on your Speccy :(
unsafe {
self.memory.log_instr(self.pc, &self.registers);
match self
.memory
.prog_read::<u8>(self.pc as _)