Compare commits

...

14 commits

Author SHA1 Message Date
peony b1b3907bc1 Merge remote-tracking branch 'holey-bytes-main/trunk' into trunk 2024-11-03 15:08:23 +01:00
Jakub Doka 798000c756 little correction 2024-11-03 10:23:17 +01:00
Jakub Doka 9de631234d adding unreachable 2024-11-03 10:15:03 +01:00
Jakub Doka 843fbddf3b loops in inlined functions now work better 2024-11-03 08:59:42 +01:00
Jakub Doka 38a00cbaa0 some start for homemade regalloc 2024-10-31 14:56:55 +01:00
Jakub Doka 4664240e08 eliminating even more todos 2024-10-31 11:10:05 +01:00
Jakub Doka 728d563cea eliminating more todos 2024-10-31 11:03:58 +01:00
Jakub Doka 56984f08ff eliminating more todos 2024-10-31 10:56:59 +01:00
Jakub Doka 3f9f99ff65 adding optional values 2024-10-31 10:36:18 +01:00
Jakub Doka 9ed3c7ab9e saving 2024-10-30 20:20:03 +01:00
Jakub Doka acacd10ee9 microoptimizing bitset 2024-10-30 18:42:25 +01:00
Jakub Doka f6f661cee3 finally struct operators fold into constant 2024-10-30 14:10:46 +01:00
Jakub Doka 4bfb5f192e fixing the matrix 2024-10-30 13:45:19 +01:00
Jakub Doka ea628c1278 saving 2024-10-29 20:38:33 +01:00
26 changed files with 1765 additions and 797 deletions


@ -23,8 +23,8 @@ hbjit = { path = "jit" }
[profile.release]
lto = true
debug = true
#strip = true
#debug = true
strip = true
codegen-units = 1
panic = "abort"
@ -32,7 +32,7 @@ panic = "abort"
rustflags = ["-Zfmt-debug=none", "-Zlocation-detail=none"]
inherits = "release"
opt-level = "z"
strip = true
strip = "debuginfo"
lto = true
codegen-units = 1
panic = "abort"


@ -20,9 +20,9 @@ log = "0.4.22"
[dependencies.regalloc2]
git = "https://github.com/jakubDoka/regalloc2"
branch = "reuse-allocations"
features = ["trace-log"]
default-features = false
[features]
default = ["std"]
default = ["std", "regalloc2/trace-log"]
std = []
no_log = ["log/max_level_off"]


@ -135,9 +135,6 @@ fib := fn(n: uint): uint {
#### pointers
```hb
main := fn(): uint {
n := @as(^uint, null)
if n != null return 9001
a := 1
b := &a
@ -161,6 +158,53 @@ drop := fn(a: uint): void {
}
```
#### nullable_types
```hb
main := fn(): uint {
	a := &1
	b := @as(?^uint, null)
	if decide() b = a
	if b == null return 9001
	c := @as(?uint, *b)
	if decide() c = null
	if c != null return 42
	d := @as(?u16, null)
	if decide() d = 1
	if d == null return 69
	f := @as(?Foo, null)
	if decide() f = .(a, 1)
	if f == null return 34
	bar := @as(?Bar, .(a, 1))
	if decide() bar = null
	if bar != null return 420
	g := @as(?^uint, null)
	g = a
	_rd := *g
	return d - *f.a
}
Foo := struct {a: ^uint, b: uint}
Bar := struct {a: ?^uint, b: uint}
decide := fn(): bool return true
```
#### structs
```hb
Ty := struct {
@ -419,6 +463,15 @@ main := fn(): uint {
}
```
#### die
```hb
main := fn(): never {
	// simply emits the 'un' instruction that immediately terminates the execution
	// the expression has similar properties to 'return' but does not accept a value
	die
}
```
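The `die` expression pairs naturally with branches that are never supposed to be reached. Below is a minimal hedged sketch, not part of the diff above; `assert` is an illustrative name written in the same style as the other examples in this README:

```hb
assert := fn(cond: bool): void {
	if cond return
	// only reached when the condition is false; terminates via 'un'
	die
}
main := fn(): uint {
	assert(true)
	return 0
}
```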
### Incomplete Examples
#### comptime_pointers
@ -439,7 +492,7 @@ modify := fn($num: ^uint): void {
MALLOC_SYS_CALL := 69
FREE_SYS_CALL := 96
malloc := fn(size: uint, align: uint): ^void return @eca(MALLOC_SYS_CALL, size, align)
malloc := fn(size: uint, align: uint): ?^void return @eca(MALLOC_SYS_CALL, size, align)
free := fn(ptr: ^void, size: uint, align: uint): void return @eca(FREE_SYS_CALL, ptr, size, align)
Vec := fn($Elem: type): type {
@ -458,7 +511,7 @@ deinit := fn($Elem: type, vec: ^Vec(Elem)): void {
return
}
push := fn($Elem: type, vec: ^Vec(Elem), value: Elem): ^Elem {
push := fn($Elem: type, vec: ^Vec(Elem), value: Elem): ?^Elem {
if vec.len == vec.cap {
if vec.cap == 0 {
vec.cap = 1
@ -466,11 +519,11 @@ push := fn($Elem: type, vec: ^Vec(Elem), value: Elem): ^Elem {
vec.cap *= 2
}
new_alloc := @as(^Elem, @bitcast(malloc(vec.cap * @sizeof(Elem), @alignof(Elem))))
if new_alloc == 0 return @bitcast(0)
new_alloc := @as(?^Elem, @bitcast(malloc(vec.cap * @sizeof(Elem), @alignof(Elem))))
if new_alloc == null return null
src_cursor := vec.data
dst_cursor := new_alloc
dst_cursor := @as(^Elem, new_alloc)
end := vec.data + vec.len
loop if src_cursor == end break else {
@ -540,6 +593,30 @@ main := fn(): uint {
### Purely Testing Examples
#### only_break_loop
```hb
memory := @use("memory.hb")
bar := fn(): int {
	loop if memory.inb(0x64) != 0 return 1
}
foo := fn(): void {
	loop if (memory.inb(0x64) & 2) == 0 break
	memory.outb(0x60, 0x0)
}
main := fn(): int {
	@inline(foo)
	return @inline(bar)
}
// in module: memory.hb
inb := fn(f: int): int return f
outb := fn(f: int, g: int): void {
}
```
#### reading_idk
```hb
main := fn(): int {
@ -1210,3 +1287,4 @@ main := fn(): int {
opaque := fn(): Foo {
return .(3, 2)
}
```


@ -371,6 +371,7 @@ impl<'a> Formatter<'a> {
}
Expr::Bool { value, .. } => f.write_str(if value { "true" } else { "false" }),
Expr::Idk { .. } => f.write_str("idk"),
Expr::Die { .. } => f.write_str("die"),
Expr::Null { .. } => f.write_str("null"),
Expr::BinOp {
left,


@ -134,6 +134,7 @@ pub enum TokenKind {
False,
Null,
Idk,
Die,
Ctor,
Tupl,
@ -306,6 +307,7 @@ gen_token_kind! {
False = b"false",
Null = b"null",
Idk = b"idk",
Die = b"die",
#[punkt]
Ctor = ".{",
Tupl = ".(",


@ -23,7 +23,8 @@
slice_from_ptr_range,
is_sorted,
iter_next_chunk,
pointer_is_aligned_to
pointer_is_aligned_to,
maybe_uninit_fill
)]
#![warn(clippy::dbg_macro)]
#![expect(stable_features, internal_features)]
@ -67,9 +68,10 @@ pub mod fs;
pub mod fuzz;
pub mod lexer;
pub mod parser;
pub mod regalloc;
pub mod son;
mod vc;
mod utils;
mod debug {
pub fn panicking() -> bool {
@ -289,6 +291,7 @@ mod ty {
pub type Builtin = u32;
pub type Struct = u32;
pub type Opt = u32;
pub type Ptr = u32;
pub type Func = u32;
pub type Global = u32;
@ -372,6 +375,7 @@ mod ty {
crate::SymKey::Struct(st.file, st.pos, st.captures)
}
Kind::Ptr(p) => crate::SymKey::Pointer(&ctx.ptrs[p as usize]),
Kind::Opt(p) => crate::SymKey::Optional(&ctx.opts[p as usize]),
Kind::Func(f) => {
let fc = &ctx.funcs[f as usize];
if let Some(base) = fc.base {
@ -440,6 +444,10 @@ mod ty {
matches!(self.expand(), Kind::Ptr(_)) || self.is_never()
}
pub fn is_optional(self) -> bool {
matches!(self.expand(), Kind::Opt(_)) || self.is_never()
}
pub fn try_upcast(self, ob: Self) -> Option<Self> {
let (oa, ob) = (Self(self.0.min(ob.0)), Self(self.0.max(ob.0)));
let (a, b) = (oa.strip_pointer(), ob.strip_pointer());
@ -447,6 +455,7 @@ mod ty {
_ if oa == Self::from(NEVER) => ob,
_ if ob == Self::from(NEVER) => oa,
_ if oa == ob => oa,
_ if ob.is_optional() => ob,
_ if oa.is_pointer() && ob.is_pointer() => return None,
_ if a.is_signed() && b.is_signed() || a.is_unsigned() && b.is_unsigned() => ob,
_ if a.is_unsigned() && b.is_signed() && a.repr() - U8 < b.repr() - I8 => ob,
@ -489,9 +498,16 @@ mod ty {
pub(crate) fn loc(&self, tys: &Types) -> Loc {
match self.expand() {
Kind::Opt(o)
if let ty = tys.ins.opts[o as usize].base
&& ty.loc(tys) == Loc::Reg
&& (ty.is_pointer() || tys.size_of(ty) < 8) =>
{
Loc::Reg
}
Kind::Ptr(_) | Kind::Builtin(_) => Loc::Reg,
Kind::Struct(_) if tys.size_of(*self) == 0 => Loc::Reg,
Kind::Struct(_) | Kind::Slice(_) => Loc::Stack,
Kind::Struct(_) | Kind::Slice(_) | Kind::Opt(_) => Loc::Stack,
Kind::Func(_) | Kind::Global(_) | Kind::Module(_) => unreachable!(),
}
}
@ -633,10 +649,11 @@ mod ty {
Builtin,
Struct,
Ptr,
Slice,
Opt,
Func,
Global,
Module,
Slice,
}
}
@ -674,6 +691,10 @@ mod ty {
f.write_str("]")
}
TK::Builtin(ty) => f.write_str(to_str(ty)),
TK::Opt(ty) => {
f.write_str("?")?;
self.rety(self.tys.ins.opts[ty as usize].base).fmt(f)
}
TK::Ptr(ty) => {
f.write_str("^")?;
self.rety(self.tys.ins.ptrs[ty as usize].base).fmt(f)
@ -729,6 +750,7 @@ type Size = u32;
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub enum SymKey<'a> {
Pointer(&'a Ptr),
Optional(&'a Opt),
Struct(FileId, Pos, ty::Tuple),
FuncInst(ty::Func, ty::Tuple),
Decl(FileId, Ident),
@ -839,7 +861,12 @@ struct Struct {
field_start: u32,
}
#[derive(PartialEq, Eq, Hash)]
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct Opt {
base: ty::Id,
}
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct Ptr {
base: ty::Id,
}
@ -934,6 +961,7 @@ pub struct TypeIns {
structs: Vec<Struct>,
fields: Vec<Field>,
ptrs: Vec<Ptr>,
opts: Vec<Opt>,
slices: Vec<Array>,
}
@ -995,16 +1023,14 @@ trait TypeParser {
let Some((Expr::BinOp { left, right, .. }, name)) = f.find_decl(id) else {
return match id {
Ok(name) => {
let name = files[from_file as usize].ident_str(name);
self.report(from_file, pos, format_args!("undefined indentifier: {name}"))
}
Ok(_) => ty::Id::NEVER,
Err("main") => self.report(
from_file,
pos,
format_args!(
"missing main function in '{}', compiler can't \
emmit libraries since such concept is not defined",
emit libraries since such a concept is not defined \
(minimal main function: `main := fn(): void {{}}`)",
f.path
),
),
@ -1064,6 +1090,10 @@ trait TypeParser {
let base = self.parse_ty(file, val, None, files);
self.tys().make_ptr(base)
}
Expr::UnOp { op: TokenKind::Que, val, .. } => {
let base = self.parse_ty(file, val, None, files);
self.tys().make_opt(base)
}
Expr::Ident { id, .. } if id.is_null() => id.len().into(),
Expr::Ident { id, pos, .. } => self.find_type(pos, file, file, Ok(id), files),
Expr::Field { target, pos, name }
@ -1205,64 +1235,44 @@ impl Types {
(ret, iter)
}
fn make_ptr(&mut self, base: ty::Id) -> ty::Id {
ty::Kind::Ptr(self.make_ptr_low(base)).compress()
}
fn make_ptr_low(&mut self, base: ty::Id) -> ty::Ptr {
let ptr = Ptr { base };
let (entry, hash) = self.syms.entry(SymKey::Pointer(&ptr), &self.ins);
match entry {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
hash_map::RawEntryMut::Vacant(v) => {
self.ins.ptrs.push(ptr);
v.insert(
ctx_map::Key {
value: ty::Kind::Ptr(self.ins.ptrs.len() as u32 - 1).compress(),
hash,
},
(),
fn make_opt(&mut self, base: ty::Id) -> ty::Id {
self.make_generic_ty(
Opt { base },
|ins| &mut ins.opts,
|e| SymKey::Optional(e),
ty::Kind::Opt,
)
.0
.value
}
}
.expand()
.inner()
}
fn make_array(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Id {
ty::Kind::Slice(self.make_array_low(ty, len)).compress()
fn make_ptr(&mut self, base: ty::Id) -> ty::Id {
self.make_generic_ty(
Ptr { base },
|ins| &mut ins.ptrs,
|e| SymKey::Pointer(e),
ty::Kind::Ptr,
)
}
fn make_array_low(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Slice {
self.syms
.get_or_insert(SymKey::Array(&Array { elem: ty, len }), &mut self.ins, |ins| {
ins.slices.push(Array { elem: ty, len });
ty::Kind::Slice(ins.slices.len() as u32 - 1).compress()
fn make_array(&mut self, elem: ty::Id, len: ArrayLen) -> ty::Id {
self.make_generic_ty(
Array { elem, len },
|ins| &mut ins.slices,
|e| SymKey::Array(e),
ty::Kind::Slice,
)
}
fn make_generic_ty<T: Copy>(
&mut self,
ty: T,
get_col: fn(&mut TypeIns) -> &mut Vec<T>,
key: fn(&T) -> SymKey,
kind: fn(u32) -> ty::Kind,
) -> ty::Id {
*self.syms.get_or_insert(key(&{ ty }), &mut self.ins, |ins| {
get_col(ins).push(ty);
kind(get_col(ins).len() as u32 - 1).compress()
})
.expand()
.inner()
//let array = Array { ty, len };
//let (entry, hash) = self.syms.entry(SymKey::Array(&array), &self.ins);
//match entry {
// hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
// hash_map::RawEntryMut::Vacant(v) => {
// self.ins.arrays.push(array);
// v.insert(
// ctx_map::Key {
// value: ty::Kind::Slice(self.ins.ptrs.len() as u32 - 1).compress(),
// hash,
// },
// (),
// )
// .0
// .value
// }
//}
//.expand()
//.inner()
}
fn size_of(&self, ty: ty::Id) -> Size {
@ -1285,6 +1295,14 @@ impl Types {
self.ins.structs[stru as usize].size.set(oiter.offset);
oiter.offset
}
ty::Kind::Opt(opt) => {
let base = self.ins.opts[opt as usize].base;
if self.nieche_of(base).is_some() {
self.size_of(base)
} else {
self.size_of(base) + self.align_of(base)
}
}
_ if let Some(size) = ty.simple_size() => size,
ty => unimplemented!("size_of: {:?}", ty),
}
@ -1327,6 +1345,37 @@ impl Types {
}
}
fn inner_of(&self, ty: ty::Id) -> Option<ty::Id> {
match ty.expand() {
ty::Kind::Opt(o) => Some(self.ins.opts[o as usize].base),
_ => None,
}
}
fn opt_layout(&self, inner_ty: ty::Id) -> OptLayout {
match self.nieche_of(inner_ty) {
Some((_, flag_offset, flag_ty)) => {
OptLayout { flag_ty, flag_offset, payload_offset: 0 }
}
None => OptLayout {
flag_ty: ty::Id::BOOL,
flag_offset: 0,
payload_offset: self.align_of(inner_ty),
},
}
}
fn nieche_of(&self, ty: ty::Id) -> Option<(bool, Offset, ty::Id)> {
match ty.expand() {
ty::Kind::Ptr(_) => Some((false, 0, ty::Id::UINT)),
// TODO: cache this
ty::Kind::Struct(s) => OffsetIter::new(s, self).into_iter(self).find_map(|(f, off)| {
self.nieche_of(f.ty).map(|(uninit, o, ty)| (uninit, o + off, ty))
}),
_ => None,
}
}
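As an aside on the layout rule encoded by the `size_of` Opt arm, `opt_layout`, and `nieche_of` above: when the wrapped type contains a niche (for example a pointer, whose all-zero bit pattern is reserved), the optional reuses the payload's storage; otherwise a boolean flag is stored in front of the payload, which is then offset by its own alignment. A standalone hedged sketch of that rule with made-up sizes, not the compiler's real `Types` tables:

```rust
// Hedged sketch of the optional-size rule from the diff above; the concrete
// sizes and alignments are illustrative only.
fn opt_size(base_size: u32, base_align: u32, has_niche: bool) -> u32 {
    if has_niche {
        base_size // the payload's invalid bit pattern doubles as "null"
    } else {
        base_size + base_align // flag byte padded up to the payload's alignment
    }
}

fn main() {
    // ?^uint: pointers have a null niche, so the optional stays pointer-sized.
    assert_eq!(opt_size(8, 8, true), 8);
    // ?u16: no niche, so a flag precedes the 2-byte payload.
    assert_eq!(opt_size(2, 2, false), 4);
}
```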
fn find_struct_field(&self, s: ty::Struct, name: &str) -> Option<usize> {
let name = self.names.project(name)?;
self.struct_fields(s).iter().position(|f| f.name == name)
@ -1355,6 +1404,12 @@ impl Types {
}
}
struct OptLayout {
flag_ty: ty::Id,
flag_offset: Offset,
payload_offset: Offset,
}
struct OffsetIter {
strct: ty::Struct,
offset: Offset,


@ -337,6 +337,7 @@ impl<'a, 'b> Parser<'a, 'b> {
T::False => E::Bool { pos, value: false },
T::Null => E::Null { pos },
T::Idk => E::Idk { pos },
T::Die => E::Die { pos },
T::DQuote => E::String { pos, literal: self.tok_str(token) },
T::Packed => {
self.packed = true;
@ -443,7 +444,7 @@ impl<'a, 'b> Parser<'a, 'b> {
pos
},
},
T::Band | T::Mul | T::Xor | T::Sub => E::UnOp {
T::Band | T::Mul | T::Xor | T::Sub | T::Que => E::UnOp {
pos,
op: token.kind,
val: {
@ -903,6 +904,10 @@ generate_expr! {
Idk {
pos: Pos,
},
/// `'die'`
Die {
pos: Pos,
},
/// `'@' Ident List('(', ',', ')', Expr)`
Directive {
pos: Pos,

lang/src/regalloc.rs (new file, 150 lines)

@ -0,0 +1,150 @@
use {crate::reg::Reg, alloc::vec::Vec, core::ops::Range};
type Nid = u16;
pub trait Ctx {
fn uses_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn params_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn args_of(&self, nid: Nid) -> impl Iterator<Item = Nid>;
fn dom_of(&self, nid: Nid) -> Nid;
}
pub struct Env<'a, C: Ctx> {
ctx: &'a C,
func: &'a Func,
res: &'a mut Res,
}
impl<'a, C: Ctx> Env<'a, C> {
pub fn new(ctx: &'a C, func: &'a Func, res: &'a mut Res) -> Self {
Self { ctx, func, res }
}
pub fn run(&mut self) {
self.res.reg_to_node.clear();
self.res.reg_to_node.resize(self.func.instrs.len(), 0);
let mut bundle = Bundle::new(self.func.instrs.len());
for &inst in &self.func.instrs {
for uinst in self.ctx.uses_of(inst) {
let mut cursor = self.ctx.dom_of(uinst);
while cursor != self.ctx.dom_of(inst) {
let mut range = self.func.blocks
[self.func.id_to_block[cursor as usize] as usize]
.range
.clone();
range.start = range.start.max(inst as usize);
range.end = range.end.min(uinst as usize);
bundle.add(range);
cursor = self.ctx.dom_of(cursor);
}
}
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(&bundle)) {
Some((i, other)) => {
other.merge(&bundle);
bundle.clear();
self.res.reg_to_node[inst as usize] = i as Reg;
}
None => {
self.res.reg_to_node[inst as usize] = self.res.bundles.len() as Reg;
self.res.bundles.push(bundle);
bundle = Bundle::new(self.func.instrs.len());
}
}
}
}
}
pub struct Res {
bundles: Vec<Bundle>,
pub reg_to_node: Vec<Reg>,
}
pub struct Bundle {
//unit_range: Range<usize>,
//set: BitSet,
taken: Vec<bool>,
}
impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) {
self.taken[range].fill(true);
}
fn overlaps(&self, other: &Self) -> bool {
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
}
fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(other));
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
}
fn clear(&mut self) {
self.taken.fill(false);
}
//fn overlaps(&self, other: &Self) -> bool {
// if self.unit_range.start >= other.unit_range.end
// || self.unit_range.end <= other.unit_range.start
// {
// return false;
// }
// let [mut a, mut b] = [self, other];
// if a.unit_range.start > b.unit_range.start {
// mem::swap(&mut a, &mut b);
// }
// let [mut tmp_a, mut tmp_b] = [0; 2];
// let [units_a, units_b] = [a.set.units(&mut tmp_a), b.set.units(&mut tmp_b)];
// let len = a.unit_range.len().min(b.unit_range.len());
// let [units_a, units_b] =
// [&units_a[b.unit_range.start - a.unit_range.start..][..len], &units_b[..len]];
// units_a.iter().zip(units_b).any(|(&a, &b)| a & b != 0)
//}
//fn merge(mut self, mut other: Self) -> Self {
// debug_assert!(!self.overlaps(&other));
// if self.unit_range.start > other.unit_range.start {
// mem::swap(&mut self, &mut other);
// }
// let final_range = self.unit_range.start..self.unit_range.end.max(other.unit_range.end);
// self.set.reserve(final_range.len());
// let mut tmp = 0;
// let other_units = other.set.units(&mut tmp);
// match self.set.units_mut() {
// Ok(units) => {
// units[other.unit_range.start - self.unit_range.start..]
// .iter_mut()
// .zip(other_units)
// .for_each(|(a, b)| *a |= b);
// }
// Err(view) => view.add_mask(tmp),
// }
// self
//}
}
pub struct Func {
pub blocks: Vec<Block>,
pub instrs: Vec<Nid>,
pub id_to_instr: Vec<Nid>,
pub id_to_block: Vec<Nid>,
}
pub struct Block {
pub range: Range<usize>,
pub start_id: Nid,
pub eld_id: Nid,
}
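For orientation, the interference test `Bundle::overlaps` used by `Env::run` above is just a per-instruction-index conjunction over the `taken` vectors: two values may share a register bundle only if no instruction index is covered by both. A tiny self-contained sketch of that idea (illustrative only, not the `Bundle` API itself):

```rust
// Minimal sketch of the bundle-overlap idea: each bool marks an instruction
// index at which some value assigned to this bundle is live.
fn overlaps(a: &[bool], b: &[bool]) -> bool {
    a.iter().zip(b).any(|(&x, &y)| x && y)
}

fn main() {
    let mut r1 = vec![false; 8];
    let mut r2 = vec![false; 8];
    r1[2..5].fill(true); // one value live over instructions 2..5
    r2[5..7].fill(true); // another live over instructions 5..7
    // Disjoint ranges do not interfere, so they could share one bundle.
    assert!(!overlaps(&r1, &r2));
}
```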

File diff suppressed because it is too large


@ -6,13 +6,12 @@ use {
son::{write_reloc, Kind, MEM},
task,
ty::{self, Arg, Loc},
vc::{BitSet, Vc},
utils::{BitSet, Vc},
HashMap, Offset, PLoc, Reloc, Sig, Size, TypedReloc, Types,
},
alloc::{borrow::ToOwned, string::String, vec::Vec},
alloc::{borrow::ToOwned, boxed::Box, collections::BTreeMap, string::String, vec::Vec},
core::{assert_matches::debug_assert_matches, mem},
hbbytecode::{self as instrs, *},
std::{boxed::Box, collections::BTreeMap},
};
impl Types {
@ -226,17 +225,20 @@ impl ItemCtx {
let node = &fuc.nodes[nid];
let mut extend = |base: ty::Id, dest: ty::Id, from: usize, to: usize| {
if base.simple_size() == dest.simple_size() {
let (bsize, dsize) = (tys.size_of(base), tys.size_of(dest));
debug_assert!(bsize <= 8, "{}", ty::Display::new(tys, files, base));
debug_assert!(dsize <= 8, "{}", ty::Display::new(tys, files, dest));
if bsize == dsize {
return Default::default();
}
match (base.is_signed(), dest.is_signed()) {
(true, true) => {
let op = [instrs::sxt8, instrs::sxt16, instrs::sxt32]
[base.simple_size().unwrap().ilog2() as usize];
[bsize.ilog2() as usize];
op(atr(allocs[to]), atr(allocs[from]))
}
_ => {
let mask = (1u64 << (base.simple_size().unwrap() * 8)) - 1;
let mask = (1u64 << (bsize * 8)) - 1;
instrs::andi(atr(allocs[to]), atr(allocs[from]), mask)
}
}
@ -306,6 +308,9 @@ impl ItemCtx {
self.emit(instrs::jmp(0));
}
}
Kind::Die => {
self.emit(instrs::un());
}
Kind::CInt { value } if node.ty.is_float() => {
self.emit(match node.ty {
ty::Id::F32 => instrs::li32(
@ -408,12 +413,20 @@ impl ItemCtx {
}
if let Some(PLoc::WideReg(r, size)) = ret {
debug_assert_eq!(
fuc.nodes[*node.inputs.last().unwrap()].kind,
Kind::Stck
);
let stck = fuc.nodes[*node.inputs.last().unwrap()].offset;
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
}
if let Some(PLoc::Reg(r, size)) = ret
&& node.ty.loc(tys) == Loc::Stack
{
debug_assert_eq!(
fuc.nodes[*node.inputs.last().unwrap()].kind,
Kind::Stck
);
let stck = fuc.nodes[*node.inputs.last().unwrap()].offset;
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
}
@ -607,8 +620,10 @@ impl ItemCtx {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, (pushed + stack) as _));
}
self.relocs.iter_mut().for_each(|r| r.reloc.offset -= stripped_prelude_size as u32);
if sig.ret != ty::Id::NEVER {
self.emit(instrs::jala(reg::ZERO, reg::RET_ADDR, 0));
}
}
}
#[derive(Debug)]
@ -812,6 +827,10 @@ impl<'a> Function<'a> {
self.add_instr(nid, ops);
self.emit_node(node.outputs[0], nid);
}
Kind::Die => {
self.add_instr(nid, vec![]);
self.emit_node(node.outputs[0], nid);
}
Kind::CInt { .. }
if node.outputs.iter().all(|&o| {
let ond = &self.nodes[o];
@ -1157,7 +1176,7 @@ impl regalloc2::Function for Function<'_> {
}
fn is_ret(&self, insn: regalloc2::Inst) -> bool {
self.nodes[self.instrs[insn.index()].nid].kind == Kind::Return
matches!(self.nodes[self.instrs[insn.index()].nid].kind, Kind::Return | Kind::Die)
}
fn is_branch(&self, insn: regalloc2::Inst) -> bool {

lang/src/utils.rs (new file, 528 lines)

@ -0,0 +1,528 @@
#![expect(dead_code)]
use {
alloc::alloc,
core::{
alloc::Layout,
fmt::Debug,
hint::unreachable_unchecked,
mem::MaybeUninit,
ops::{Deref, DerefMut, Not},
ptr::Unique,
},
};
type Nid = u16;
pub union BitSet {
inline: usize,
alloced: Unique<AllocedBitSet>,
}
impl Debug for BitSet {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
}
impl Clone for BitSet {
fn clone(&self) -> Self {
if self.is_inline() {
Self { inline: unsafe { self.inline } }
} else {
let (data, _) = self.data_and_len();
let (layout, _) = Self::layout(data.len());
unsafe {
let ptr = alloc::alloc(layout);
ptr.copy_from_nonoverlapping(self.alloced.as_ptr() as _, layout.size());
Self { alloced: Unique::new_unchecked(ptr as _) }
}
}
}
}
impl Drop for BitSet {
fn drop(&mut self) {
if !self.is_inline() {
unsafe {
let cap = self.alloced.as_ref().cap;
alloc::dealloc(self.alloced.as_ptr() as _, Self::layout(cap).0);
}
}
}
}
impl Default for BitSet {
fn default() -> Self {
Self { inline: Self::FLAG }
}
}
impl BitSet {
const FLAG: usize = 1 << (Self::UNIT - 1);
const INLINE_ELEMS: usize = Self::UNIT - 1;
const UNIT: usize = core::mem::size_of::<usize>() * 8;
fn is_inline(&self) -> bool {
unsafe { self.inline & Self::FLAG != 0 }
}
fn data_and_len(&self) -> (&[usize], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
} else {
let small_vec = self.alloced.as_ref();
(
core::slice::from_raw_parts(
&small_vec.data as *const _ as *const usize,
small_vec.cap,
),
small_vec.cap * core::mem::size_of::<usize>() * 8,
)
}
}
}
fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_mut(&mut self.inline), Self::INLINE_ELEMS)
} else {
let small_vec = self.alloced.as_mut();
(
core::slice::from_raw_parts_mut(
&mut small_vec.data as *mut _ as *mut usize,
small_vec.cap,
),
small_vec.cap * Self::UNIT,
)
}
}
}
fn indexes(index: usize) -> (usize, usize) {
(index / Self::UNIT, index % Self::UNIT)
}
pub fn get(&self, index: Nid) -> bool {
let index = index as usize;
let (data, len) = self.data_and_len();
if index >= len {
return false;
}
let (elem, bit) = Self::indexes(index);
(unsafe { *data.get_unchecked(elem) }) & (1 << bit) != 0
}
pub fn set(&mut self, index: Nid) -> bool {
let index = index as usize;
let (mut data, len) = self.data_mut_and_len();
if core::intrinsics::unlikely(index >= len) {
self.grow(index.next_power_of_two().max(4 * Self::UNIT));
(data, _) = self.data_mut_and_len();
}
let (elem, bit) = Self::indexes(index);
let elem = unsafe { data.get_unchecked_mut(elem) };
let prev = *elem;
*elem |= 1 << bit;
*elem != prev
}
fn grow(&mut self, size: usize) {
debug_assert!(size.is_power_of_two());
let slot_count = size / Self::UNIT;
let (layout, off) = Self::layout(slot_count);
let (ptr, prev_len) = unsafe {
if self.is_inline() {
let ptr = alloc::alloc(layout);
*ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
(ptr, 1)
} else {
let prev_len = self.alloced.as_ref().cap;
let (prev_layout, _) = Self::layout(prev_len);
(alloc::realloc(self.alloced.as_ptr() as _, prev_layout, layout.size()), prev_len)
}
};
unsafe {
MaybeUninit::fill(
core::slice::from_raw_parts_mut(
ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
slot_count - prev_len,
),
0,
);
*ptr.cast::<usize>() = slot_count;
core::ptr::write(self, Self { alloced: Unique::new_unchecked(ptr as _) });
}
}
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
unsafe {
core::alloc::Layout::new::<AllocedBitSet>()
.extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
.unwrap_unchecked()
}
}
pub fn iter(&self) -> BitSetIter {
if self.is_inline() {
BitSetIter { index: 0, current: unsafe { self.inline & !Self::FLAG }, remining: &[] }
} else {
let &[current, ref remining @ ..] = self.data_and_len().0 else {
unsafe { unreachable_unchecked() }
};
BitSetIter { index: 0, current, remining }
}
}
pub fn clear(&mut self, len: usize) {
self.reserve(len);
if self.is_inline() {
unsafe { self.inline &= Self::FLAG };
} else {
self.data_mut_and_len().0.fill(0);
}
}
pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
if self.is_inline() {
*slot = unsafe { self.inline } & !Self::FLAG;
core::slice::from_ref(slot)
} else {
self.data_and_len().0
}
}
pub fn reserve(&mut self, len: usize) {
if len > self.data_and_len().1 {
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
}
}
pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
if self.is_inline() {
Err(unsafe {
core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
})
} else {
Ok(self.data_mut_and_len().0)
}
}
}
pub struct InlineBitSetView(usize);
impl InlineBitSetView {
pub(crate) fn add_mask(&mut self, tmp: usize) {
debug_assert!(tmp & BitSet::FLAG == 0);
self.0 |= tmp;
}
}
pub struct BitSetIter<'a> {
index: usize,
current: usize,
remining: &'a [usize],
}
impl Iterator for BitSetIter<'_> {
type Item = usize;
fn next(&mut self) -> Option<Self::Item> {
while self.current == 0 {
self.current = *self.remining.take_first()?;
self.index += 1;
}
let sub_idx = self.current.trailing_zeros() as usize;
self.current &= self.current - 1;
Some(self.index * BitSet::UNIT + sub_idx)
}
}
struct AllocedBitSet {
cap: usize,
data: [usize; 0],
}
#[cfg(test)]
#[test]
fn test_small_bit_set() {
use std::vec::Vec;
let mut sv = BitSet::default();
sv.set(10);
debug_assert!(sv.get(10));
sv.set(100);
debug_assert!(sv.get(100));
sv.set(10000);
debug_assert!(sv.get(10000));
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[10, 100, 10000]);
sv.clear(10000);
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[]);
}
pub union Vc {
inline: InlineVc,
alloced: AllocedVc,
}
impl Default for Vc {
fn default() -> Self {
Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
}
}
impl Debug for Vc {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.as_slice().fmt(f)
}
}
impl FromIterator<Nid> for Vc {
fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
let mut slf = Self::default();
for i in iter {
slf.push(i);
}
slf
}
}
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
const VC_SIZE: usize = 16;
impl Vc {
fn is_inline(&self) -> bool {
unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
}
fn layout(&self) -> Option<core::alloc::Layout> {
unsafe {
self.is_inline().not().then(|| {
core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
})
}
}
pub fn len(&self) -> usize {
unsafe {
if self.is_inline() {
self.inline.cap as _
} else {
self.alloced.len as _
}
}
}
fn len_mut(&mut self) -> &mut Nid {
unsafe {
if self.is_inline() {
&mut self.inline.cap
} else {
&mut self.alloced.len
}
}
}
fn as_ptr(&self) -> *const Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
fn as_mut_ptr(&mut self) -> *mut Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_mut_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
pub fn as_slice(&self) -> &[Nid] {
unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
}
fn as_slice_mut(&mut self) -> &mut [Nid] {
unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
}
pub fn push(&mut self, value: Nid) {
if let Some(layout) = self.layout()
&& unsafe { self.alloced.len == self.alloced.cap }
{
unsafe {
self.alloced.cap *= 2;
self.alloced.base = Unique::new_unchecked(
alloc::realloc(
self.alloced.base.as_ptr().cast(),
layout,
self.alloced.cap as usize * core::mem::size_of::<Nid>(),
)
.cast(),
);
}
} else if self.len() == INLINE_ELEMS {
unsafe {
let mut allcd =
Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
*self = allcd;
}
}
unsafe {
*self.len_mut() += 1;
self.as_mut_ptr().add(self.len() - 1).write(value);
}
}
unsafe fn alloc(cap: usize, len: usize) -> Self {
debug_assert!(cap > INLINE_ELEMS);
let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
let alloc = unsafe { alloc::alloc(layout) };
unsafe {
Vc {
alloced: AllocedVc {
base: Unique::new_unchecked(alloc.cast()),
len: len as _,
cap: cap as _,
},
}
}
}
pub fn swap_remove(&mut self, index: usize) {
let len = self.len() - 1;
self.as_slice_mut().swap(index, len);
*self.len_mut() -= 1;
}
pub fn remove(&mut self, index: usize) {
self.as_slice_mut().copy_within(index + 1.., index);
*self.len_mut() -= 1;
}
}
impl Drop for Vc {
fn drop(&mut self) {
if let Some(layout) = self.layout() {
unsafe {
alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
}
}
}
}
impl Clone for Vc {
fn clone(&self) -> Self {
self.as_slice().into()
}
}
impl IntoIterator for Vc {
type IntoIter = VcIntoIter;
type Item = Nid;
fn into_iter(self) -> Self::IntoIter {
VcIntoIter { start: 0, end: self.len(), vc: self }
}
}
pub struct VcIntoIter {
start: usize,
end: usize,
vc: Vc,
}
impl Iterator for VcIntoIter {
type Item = Nid;
fn next(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
self.start += 1;
Some(ret)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.end - self.start;
(len, Some(len))
}
}
impl DoubleEndedIterator for VcIntoIter {
fn next_back(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
self.end -= 1;
Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
}
}
impl ExactSizeIterator for VcIntoIter {}
impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
fn from(value: [Nid; SIZE]) -> Self {
value.as_slice().into()
}
}
impl<'a> From<&'a [Nid]> for Vc {
fn from(value: &'a [Nid]) -> Self {
if value.len() <= INLINE_ELEMS {
let mut dflt = Self::default();
unsafe {
core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
};
dflt.inline.cap = value.len() as _;
dflt
} else {
let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
unsafe {
core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
};
allcd
}
}
}
impl Deref for Vc {
type Target = [Nid];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl DerefMut for Vc {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_slice_mut()
}
}
#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
cap: Nid,
elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}
#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
cap: Nid,
len: Nid,
base: Unique<Nid>,
}


@ -1,306 +0,0 @@
use {
alloc::vec::Vec,
core::{
fmt::Debug,
mem::MaybeUninit,
ops::{Deref, DerefMut, Not},
ptr::Unique,
},
};
type Nid = u16;
const VC_SIZE: usize = 16;
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
pub union Vc {
inline: InlineVc,
alloced: AllocedVc,
}
impl Default for Vc {
fn default() -> Self {
Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
}
}
impl Debug for Vc {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.as_slice().fmt(f)
}
}
impl FromIterator<Nid> for Vc {
fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
let mut slf = Self::default();
for i in iter {
slf.push(i);
}
slf
}
}
impl Vc {
fn is_inline(&self) -> bool {
unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
}
fn layout(&self) -> Option<core::alloc::Layout> {
unsafe {
self.is_inline().not().then(|| {
core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
})
}
}
pub fn len(&self) -> usize {
unsafe {
if self.is_inline() {
self.inline.cap as _
} else {
self.alloced.len as _
}
}
}
fn len_mut(&mut self) -> &mut Nid {
unsafe {
if self.is_inline() {
&mut self.inline.cap
} else {
&mut self.alloced.len
}
}
}
fn as_ptr(&self) -> *const Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
fn as_mut_ptr(&mut self) -> *mut Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_mut_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
pub fn as_slice(&self) -> &[Nid] {
unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
}
fn as_slice_mut(&mut self) -> &mut [Nid] {
unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
}
pub fn push(&mut self, value: Nid) {
if let Some(layout) = self.layout()
&& unsafe { self.alloced.len == self.alloced.cap }
{
unsafe {
self.alloced.cap *= 2;
self.alloced.base = Unique::new_unchecked(
alloc::alloc::realloc(
self.alloced.base.as_ptr().cast(),
layout,
self.alloced.cap as usize * core::mem::size_of::<Nid>(),
)
.cast(),
);
}
} else if self.len() == INLINE_ELEMS {
unsafe {
let mut allcd =
Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
*self = allcd;
}
}
unsafe {
*self.len_mut() += 1;
self.as_mut_ptr().add(self.len() - 1).write(value);
}
}
unsafe fn alloc(cap: usize, len: usize) -> Self {
debug_assert!(cap > INLINE_ELEMS);
let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
let alloc = unsafe { alloc::alloc::alloc(layout) };
unsafe {
Vc {
alloced: AllocedVc {
base: Unique::new_unchecked(alloc.cast()),
len: len as _,
cap: cap as _,
},
}
}
}
pub fn swap_remove(&mut self, index: usize) {
let len = self.len() - 1;
self.as_slice_mut().swap(index, len);
*self.len_mut() -= 1;
}
pub fn remove(&mut self, index: usize) {
self.as_slice_mut().copy_within(index + 1.., index);
*self.len_mut() -= 1;
}
}
impl Drop for Vc {
fn drop(&mut self) {
if let Some(layout) = self.layout() {
unsafe {
alloc::alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
}
}
}
}
impl Clone for Vc {
fn clone(&self) -> Self {
self.as_slice().into()
}
}
impl IntoIterator for Vc {
type IntoIter = VcIntoIter;
type Item = Nid;
fn into_iter(self) -> Self::IntoIter {
VcIntoIter { start: 0, end: self.len(), vc: self }
}
}
pub struct VcIntoIter {
start: usize,
end: usize,
vc: Vc,
}
impl Iterator for VcIntoIter {
type Item = Nid;
fn next(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
self.start += 1;
Some(ret)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.end - self.start;
(len, Some(len))
}
}
impl DoubleEndedIterator for VcIntoIter {
fn next_back(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
self.end -= 1;
Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
}
}
impl ExactSizeIterator for VcIntoIter {}
impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
fn from(value: [Nid; SIZE]) -> Self {
value.as_slice().into()
}
}
impl<'a> From<&'a [Nid]> for Vc {
fn from(value: &'a [Nid]) -> Self {
if value.len() <= INLINE_ELEMS {
let mut dflt = Self::default();
unsafe {
core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
};
dflt.inline.cap = value.len() as _;
dflt
} else {
let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
unsafe {
core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
};
allcd
}
}
}
impl Deref for Vc {
type Target = [Nid];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl DerefMut for Vc {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_slice_mut()
}
}
#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
cap: Nid,
elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}
#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
cap: Nid,
len: Nid,
base: Unique<Nid>,
}
#[derive(Default, Clone)]
pub struct BitSet {
data: Vec<usize>,
}
impl BitSet {
const ELEM_SIZE: usize = core::mem::size_of::<usize>() * 8;
pub fn clear(&mut self, bit_size: usize) {
let new_len = bit_size.div_ceil(Self::ELEM_SIZE);
self.data.clear();
self.data.resize(new_len, 0);
}
pub fn set(&mut self, idx: Nid) -> bool {
let idx = idx as usize;
let data_idx = idx / Self::ELEM_SIZE;
let sub_idx = idx % Self::ELEM_SIZE;
let prev = self.data[data_idx] & (1 << sub_idx);
self.data[data_idx] |= 1 << sub_idx;
prev == 0
}
pub fn get(&self, idx: Nid) -> bool {
let idx = idx as usize;
let data_idx = idx / Self::ELEM_SIZE;
let sub_idx = idx % Self::ELEM_SIZE;
let prev = self.data[data_idx] & (1 << sub_idx);
prev != 0
}
}


@ -0,0 +1,5 @@
main:
UN
code size: 9
ret: 0
status: Err(Unreachable)


@ -1,20 +1,17 @@
deinit:
ADDI64 r254, r254, -48d
ST r31, r254, 24a, 24h
LD r5, r2, 16a, 8h
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
CP r32, r2
LD r5, r2, 16a, 8h
LI64 r4, 8d
MUL64 r3, r5, r4
CP r5, r32
LD r2, r5, 0a, 8h
JAL r31, r0, :free
ADDI64 r33, r254, 0d
CP r1, r33
CP r1, r32
JAL r31, r0, :new
CP r2, r32
BMC r33, r2, 24h
LD r31, r254, 24a, 24h
ADDI64 r254, r254, 48d
LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
free:
CP r10, r2
@ -26,23 +23,21 @@ free:
ECA
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -80d
ST r31, r254, 48a, 32h
ADDI64 r32, r254, 24d
ADDI64 r254, r254, -48d
ST r31, r254, 24a, 24h
ADDI64 r32, r254, 0d
CP r1, r32
JAL r31, r0, :new
ADDI64 r33, r254, 0d
BMC r32, r33, 24h
LI64 r3, 69d
CP r2, r33
CP r2, r32
JAL r31, r0, :push
LD r12, r254, 0a, 8h
LD r34, r12, 0a, 8h
CP r2, r33
LD r9, r254, 0a, 8h
LD r33, r9, 0a, 8h
CP r2, r32
JAL r31, r0, :deinit
CP r1, r34
LD r31, r254, 48a, 32h
ADDI64 r254, r254, 80d
CP r1, r33
LD r31, r254, 24a, 24h
ADDI64 r254, r254, 48d
JALA r0, r31, 0a
malloc:
CP r9, r2
@ -80,52 +75,53 @@ push:
MUL64 r2, r36, r37
CP r3, r37
JAL r31, r0, :malloc
CP r38, r34
ST r36, r38, 16a, 8h
JNE r1, r35, :3
CP r1, r35
CP r38, r1
CP r39, r34
ST r36, r39, 16a, 8h
LI64 r1, 0d
CP r7, r38
JNE r7, r1, :3
JMP :4
3: CP r39, r1
CP r1, r35
LD r6, r38, 8a, 8h
MULI64 r8, r6, 8d
LD r12, r38, 0a, 8h
ADD64 r11, r12, r8
CP r3, r39
9: JNE r11, r12, :5
LD r8, r38, 8a, 8h
JEQ r8, r1, :6
3: CP r38, r7
LD r8, r39, 8a, 8h
MULI64 r10, r8, 8d
LD r3, r39, 0a, 8h
ADD64 r7, r3, r10
CP r5, r38
9: LD r2, r39, 0a, 8h
LD r10, r39, 8a, 8h
JNE r7, r3, :5
JEQ r10, r35, :6
CP r4, r37
MUL64 r3, r8, r4
LD r2, r38, 0a, 8h
MUL64 r3, r10, r4
JAL r31, r0, :free
CP r5, r39
CP r6, r38
JMP :7
6: CP r5, r39
7: ST r5, r38, 0a, 8h
6: CP r6, r38
7: ST r6, r39, 0a, 8h
JMP :8
5: CP r4, r37
CP r5, r39
ADDI64 r6, r3, 8d
ADDI64 r7, r12, 8d
LD r8, r12, 0a, 8h
ST r8, r3, 0a, 8h
CP r3, r6
CP r12, r7
CP r6, r38
ADDI64 r8, r5, 8d
ADDI64 r9, r3, 8d
LD r10, r3, 0a, 8h
ST r10, r5, 0a, 8h
CP r3, r9
CP r5, r8
JMP :9
0: CP r38, r34
8: LD r3, r38, 8a, 8h
MULI64 r5, r3, 8d
LD r4, r38, 0a, 8h
ADD64 r1, r4, r5
0: CP r39, r34
8: LD r5, r39, 8a, 8h
MULI64 r7, r5, 8d
LD r6, r39, 0a, 8h
ADD64 r1, r6, r7
CP r3, r32
ST r3, r1, 0a, 8h
LD r11, r38, 8a, 8h
ADD64 r2, r11, r33
ST r2, r38, 8a, 8h
LD r2, r39, 8a, 8h
ADD64 r3, r2, r33
ST r3, r39, 8a, 8h
4: LD r31, r254, 0a, 72h
ADDI64 r254, r254, 72d
JALA r0, r31, 0a
code size: 980
code size: 955
ret: 69
status: Ok(())


@ -4,9 +4,9 @@ main:
LI64 r6, 128d
LI64 r7, 0d
ADDI64 r4, r254, 0d
2: JLTU r7, r6, :0
LD r1, r254, 42a, 1h
ANDI r1, r1, 255d
2: LD r12, r254, 42a, 1h
JLTU r7, r6, :0
ANDI r1, r12, 255d
JMP :1
0: ADDI64 r3, r7, 1d
ADD64 r7, r4, r7


@ -23,16 +23,16 @@ scalar_values:
JALA r0, r31, 0a
structs:
ADDI64 r254, r254, -32d
LI64 r2, 5d
ST r2, r254, 16a, 8h
ST r2, r254, 24a, 8h
LD r6, r254, 16a, 8h
ADDI64 r8, r6, 15d
ST r8, r254, 0a, 8h
LI64 r7, 20d
ST r7, r254, 8a, 8h
LD r1, r254, 0a, 8h
SUB64 r1, r1, r7
LI64 r1, 5d
ST r1, r254, 0a, 8h
ST r1, r254, 8a, 8h
LD r5, r254, 0a, 8h
ADDI64 r7, r5, 15d
ST r7, r254, 16a, 8h
LI64 r10, 20d
ST r10, r254, 24a, 8h
LD r1, r254, 16a, 8h
SUB64 r1, r1, r10
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 310


@ -0,0 +1,87 @@
decide:
LI8 r1, 1b
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -136d
ST r31, r254, 80a, 56h
JAL r31, r0, :decide
LI64 r4, 0d
ADDI64 r32, r254, 72d
ANDI r1, r1, 255d
JNE r1, r0, :0
CP r33, r4
JMP :1
0: CP r33, r32
1: JNE r33, r4, :2
LI64 r1, 9001d
JMP :3
2: JAL r31, r0, :decide
LI8 r34, 0b
LI8 r35, 1b
ANDI r1, r1, 255d
JNE r1, r0, :4
ST r35, r254, 56a, 1h
LD r9, r33, 0a, 8h
ST r9, r254, 64a, 8h
JMP :5
4: ST r34, r254, 56a, 1h
5: LD r6, r254, 56a, 1h
ANDI r6, r6, 255d
ANDI r34, r34, 255d
JEQ r6, r34, :6
LI64 r1, 42d
JMP :3
6: JAL r31, r0, :decide
LI32 r2, 0w
ANDI r1, r1, 255d
JNE r1, r0, :7
CP r36, r2
JMP :8
7: LI32 r36, 8388609w
8: ANDI r36, r36, 4294967295d
ANDI r2, r2, 4294967295d
JNE r36, r2, :9
LI64 r1, 69d
JMP :3
9: JAL r31, r0, :decide
LI64 r3, 0d
LI64 r37, 1d
ANDI r1, r1, 255d
JNE r1, r0, :10
ST r3, r254, 16a, 8h
JMP :11
10: ST r32, r254, 16a, 8h
ST r37, r254, 24a, 8h
ST r37, r254, 72a, 8h
11: LD r2, r254, 16a, 8h
JNE r2, r3, :12
LI64 r1, 34d
JMP :3
12: JAL r31, r0, :decide
ADDI64 r10, r254, 32d
ANDI r1, r1, 255d
JNE r1, r0, :13
ADDI64 r11, r254, 0d
ST r32, r254, 0a, 8h
ST r37, r254, 8a, 8h
ST r35, r254, 32a, 1h
ADDI64 r12, r10, 8d
BMC r11, r12, 16h
JMP :14
13: ST r34, r254, 32a, 1h
14: LD r11, r254, 32a, 1h
ANDI r11, r11, 255d
ANDI r34, r34, 255d
JEQ r11, r34, :15
LI64 r1, 420d
JMP :3
15: LD r5, r254, 16a, 8h
LD r7, r5, 0a, 8h
ANDI r9, r36, 65535d
SUB64 r1, r9, r7
3: LD r31, r254, 80a, 56h
ADDI64 r254, r254, 136d
JALA r0, r31, 0a
code size: 729
ret: 0
status: Ok(())


@ -0,0 +1,30 @@
inb:
CP r1, r2
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LI64 r32, 0d
LI64 r33, 100d
4: CP r2, r33
JAL r31, r0, :inb
ANDI r7, r1, 2d
JNE r7, r32, :0
LI64 r2, 96d
CP r3, r32
JAL r31, r0, :outb
3: CP r2, r33
JAL r31, r0, :inb
JEQ r1, r32, :1
LI64 r1, 1d
JMP :2
1: JMP :3
0: JMP :4
2: LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
outb:
JALA r0, r31, 0a
code size: 198
ret: 1
status: Ok(())


@ -1,25 +1,23 @@
main:
ADDI64 r254, r254, -72d
ST r31, r254, 56a, 16h
ADDI64 r32, r254, 0d
ADDI64 r254, r254, -48d
ST r31, r254, 40a, 8h
LI64 r4, 4d
ADDI64 r3, r254, 24d
ADDI64 r6, r254, 0d
ST r4, r254, 24a, 8h
LI64 r5, 1d
ST r5, r254, 32a, 8h
ST r5, r254, 16a, 8h
BMC r3, r6, 16h
JAL r31, r0, :opaque
ST r1, r254, 0a, 16h
LI64 r6, 4d
ADDI64 r5, r254, 40d
ADDI64 r8, r254, 16d
ST r6, r254, 40a, 8h
LI64 r7, 1d
ST r7, r254, 48a, 8h
ST r7, r254, 32a, 8h
BMC r5, r8, 16h
BMC r32, r8, 16h
LD r7, r254, 24a, 8h
LD r9, r254, 32a, 8h
ADD64 r11, r9, r7
LD r9, r254, 16a, 8h
SUB64 r1, r9, r11
LD r31, r254, 56a, 16h
ADDI64 r254, r254, 72d
LD r4, r254, 8a, 8h
LD r6, r254, 16a, 8h
ADD64 r8, r6, r4
LD r6, r254, 0a, 8h
SUB64 r1, r6, r8
LD r31, r254, 40a, 8h
ADDI64 r254, r254, 48d
JALA r0, r31, 0a
opaque:
ADDI64 r254, r254, -16d
@ -31,6 +29,6 @@ opaque:
LD r1, r2, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 339
code size: 323
ret: 0
status: Ok(())


@ -1,29 +1,27 @@
main:
ADDI64 r254, r254, -24d
ST r31, r254, 8a, 16h
ADDI64 r32, r254, 4d
ADDI64 r254, r254, -12d
ST r31, r254, 4a, 8h
ADDI64 r2, r254, 0d
JAL r31, r0, :random_color
ST r1, r254, 4a, 4h
ADDI64 r5, r254, 0d
BMC r32, r5, 4h
LD r8, r254, 0a, 1h
LD r11, r254, 1a, 1h
LD r3, r254, 2a, 1h
ANDI r12, r8, 255d
ANDI r4, r11, 255d
LD r9, r254, 3a, 1h
ANDI r8, r3, 255d
ADD64 r7, r4, r12
ANDI r1, r9, 255d
ADD64 r12, r7, r8
ADD64 r1, r12, r1
LD r31, r254, 8a, 16h
ADDI64 r254, r254, 24d
ST r1, r254, 0a, 4h
LD r5, r254, 0a, 1h
LD r8, r254, 1a, 1h
LD r12, r254, 2a, 1h
ANDI r9, r5, 255d
ANDI r1, r8, 255d
LD r6, r254, 3a, 1h
ANDI r5, r12, 255d
ADD64 r4, r1, r9
ANDI r10, r6, 255d
ADD64 r9, r4, r5
ADD64 r1, r9, r10
LD r31, r254, 4a, 8h
ADDI64 r254, r254, 12d
JALA r0, r31, 0a
random_color:
LRA r1, r0, :white
LD r1, r1, 0a, 4h
JALA r0, r31, 0a
code size: 257
code size: 241
ret: 1020
status: Ok(())


@ -1,14 +1,12 @@
main:
ADDI64 r254, r254, -4d
LRA r1, r0, :black
LRA r2, r0, :white
ADDI64 r3, r254, 0d
LRA r5, r0, :white
BMC r1, r3, 4h
BMC r5, r3, 4h
LD r9, r254, 3a, 1h
ANDI r1, r9, 255d
BMC r2, r3, 4h
LD r6, r254, 3a, 1h
ANDI r1, r6, 255d
ADDI64 r254, r254, 4d
JALA r0, r31, 0a
code size: 108
code size: 92
ret: 255
status: Ok(())


@ -8,8 +8,8 @@ main:
6: JNE r8, r7, :0
LI64 r7, 2d
CP r8, r4
4: JNE r8, r6, :1
LD r1, r254, 0a, 8h
4: LD r1, r254, 0a, 8h
JNE r8, r6, :1
JMP :2
1: MUL64 r10, r8, r7
ADD64 r8, r8, r6
@ -19,14 +19,14 @@ main:
5: JNE r2, r7, :3
JMP :4
3: ADD64 r1, r2, r6
ADD64 r11, r10, r2
ADD64 r12, r9, r2
MULI64 r2, r11, 8d
ADD64 r11, r9, r2
MULI64 r3, r11, 8d
ADD64 r12, r10, r2
ADD64 r11, r5, r3
MULI64 r12, r12, 8d
ADD64 r11, r5, r2
ADD64 r12, r5, r12
BMC r12, r11, 8h
BMC r11, r12, 8h
BMC r12, r11, 8h
CP r2, r1
JMP :5
0: ADD64 r11, r8, r6


@ -1,37 +1,6 @@
main:
ADDI64 r254, r254, -64d
LI64 r3, 1d
ADDI64 r2, r254, 32d
ST r3, r254, 32a, 8h
LI64 r6, 2d
ST r6, r254, 40a, 8h
LI64 r6, -3d
ADDI64 r5, r254, 0d
ADDI64 r11, r254, 48d
ST r6, r254, 0a, 8h
LI64 r6, -4d
BMC r2, r11, 16h
ST r6, r254, 8a, 8h
ADDI64 r3, r5, 16d
LD r9, r254, 56a, 8h
LI64 r8, 4d
LD r10, r254, 48a, 8h
LI64 r11, 3d
BMC r2, r3, 16h
SUB64 r4, r8, r9
LD r12, r254, 24a, 8h
ADD64 r7, r10, r11
LD r1, r254, 0a, 8h
SUB64 r8, r11, r10
LD r2, r254, 16a, 8h
ADD64 r6, r12, r4
ADD64 r3, r1, r7
ADD64 r10, r2, r8
ADD64 r12, r9, r6
ADD64 r9, r10, r3
ADD64 r1, r9, r12
ADDI64 r254, r254, 64d
LI64 r1, 10d
JALA r0, r31, 0a
code size: 308
code size: 29
ret: 10
status: Ok(())


@ -1,24 +1,23 @@
main:
ADDI64 r254, r254, -72d
ST r31, r254, 48a, 24h
ADDI64 r254, r254, -56d
ST r31, r254, 32a, 24h
LI64 r3, 4d
ADDI64 r2, r254, 32d
ST r3, r254, 32a, 8h
ADDI64 r2, r254, 16d
ST r3, r254, 16a, 8h
LI64 r32, 3d
ST r32, r254, 40a, 8h
ADDI64 r33, r254, 16d
ST r32, r254, 24a, 8h
ADDI64 r33, r254, 0d
LD r3, r2, 0a, 16h
JAL r31, r0, :odher_pass
ST r1, r254, 16a, 16h
ADDI64 r2, r254, 0d
BMC r33, r2, 16h
LD r4, r254, 8a, 8h
JNE r4, r32, :0
ST r1, r254, 0a, 16h
LD r2, r254, 8a, 8h
JNE r2, r32, :0
CP r2, r33
JAL r31, r0, :pass
JMP :1
0: LI64 r1, 0d
1: LD r31, r254, 48a, 24h
ADDI64 r254, r254, 72d
1: LD r31, r254, 32a, 24h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
odher_pass:
ADDI64 r254, r254, -16d
@ -30,6 +29,6 @@ odher_pass:
pass:
LD r1, r2, 0a, 8h
JALA r0, r31, 0a
code size: 318
code size: 305
ret: 4
status: Ok(())


@ -8,9 +8,9 @@ main:
4: JLTU r9, r8, :0
LI64 r4, 10d
CP r7, r6
3: JLTU r7, r4, :1
LD r10, r254, 2048a, 1h
ANDI r1, r10, 255d
3: LD r9, r254, 2048a, 1h
JLTU r7, r4, :1
ANDI r1, r9, 255d
JMP :2
1: ADD64 r12, r7, r6
MULI64 r1, r7, 1024d


@ -1,45 +1,47 @@
main:
ADDI64 r254, r254, -48d
ST r31, r254, 32a, 16h
ADDI64 r32, r254, 16d
ADDI64 r254, r254, -24d
ST r31, r254, 16a, 8h
ADDI64 r3, r254, 0d
LI64 r4, 0d
CP r3, r4
JAL r31, r0, :maina
ST r1, r254, 16a, 16h
ADDI64 r7, r254, 0d
BMC r32, r7, 16h
LD r11, r254, 12a, 1h
LD r12, r254, 3a, 1h
SUB8 r2, r12, r11
ANDI r1, r2, 255d
LD r31, r254, 32a, 16h
ADDI64 r254, r254, 48d
ST r1, r254, 0a, 16h
LD r8, r254, 12a, 1h
LD r9, r254, 3a, 1h
SUB8 r11, r9, r8
ANDI r1, r11, 255d
LD r31, r254, 16a, 8h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
maina:
ADDI64 r254, r254, -36d
ST r31, r254, 28a, 8h
ADDI64 r5, r254, 24d
ADDI64 r254, r254, -28d
ST r31, r254, 20a, 8h
ADDI64 r5, r254, 16d
JAL r31, r0, :small_struct
ST r1, r254, 24a, 4h
LI8 r11, 0b
ADDI64 r10, r254, 0d
ST r11, r254, 0a, 1h
ST r11, r254, 1a, 1h
ST r11, r254, 2a, 1h
LI8 r4, 3b
ST r4, r254, 3a, 1h
LI8 r7, 1b
ST r7, r254, 4a, 1h
ST r11, r254, 5a, 1h
ST r11, r254, 6a, 1h
ST r11, r254, 7a, 1h
ADDI64 r1, r254, 8d
BMC r10, r1, 8h
ADDI64 r4, r1, 8d
BMC r10, r4, 8h
ST r1, r254, 16a, 4h
LI8 r9, 0b
ADDI64 r1, r254, 0d
ST r9, r254, 0a, 1h
ST r9, r254, 1a, 1h
ST r9, r254, 2a, 1h
LI8 r3, 3b
ST r3, r254, 3a, 1h
LI8 r6, 1b
ST r6, r254, 4a, 1h
ST r9, r254, 5a, 1h
ST r9, r254, 6a, 1h
ST r9, r254, 7a, 1h
ST r9, r254, 8a, 1h
ST r9, r254, 9a, 1h
ST r9, r254, 10a, 1h
ST r3, r254, 11a, 1h
ST r6, r254, 12a, 1h
ST r9, r254, 13a, 1h
ST r9, r254, 14a, 1h
ST r9, r254, 15a, 1h
LD r1, r1, 0a, 16h
LD r31, r254, 28a, 8h
ADDI64 r254, r254, 36d
LD r31, r254, 20a, 8h
ADDI64 r254, r254, 28d
JALA r0, r31, 0a
small_struct:
ADDI64 r254, r254, -4d
@ -50,6 +52,6 @@ small_struct:
LD r1, r3, 0a, 4h
ADDI64 r254, r254, 4d
JALA r0, r31, 0a
code size: 514
code size: 570
ret: 2
status: Ok(())