Compare commits


No commits in common. "f1ea01ef0c3a35c443d5ff1520510d621387870c" and "b2254e982079760ca9e2452a1ff6c4079684622f" have entirely different histories.

5 changed files with 292 additions and 414 deletions

Cargo.lock (generated)

@ -110,12 +110,6 @@ version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
[[package]]
name = "hbbytecode"
version = "0.1.0"
@ -129,7 +123,7 @@ name = "hblang"
version = "0.1.0"
dependencies = [
"env_logger",
"hashbrown 0.15.0",
"hashbrown",
"hbbytecode",
"hbvm",
"log",
@ -197,7 +191,7 @@ source = "git+https://github.com/jakubDoka/regalloc2?branch=reuse-allocations#41
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown 0.14.5",
"hashbrown",
"log",
"rustc-hash",
"smallvec",


@ -8,7 +8,7 @@ name = "hbc"
path = "src/main.rs"
[dependencies]
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
hashbrown = { version = "0.14.5", default-features = false }
hbbytecode = { version = "0.1.0", path = "../hbbytecode" }
hbvm = { path = "../hbvm", features = ["nightly"] }
log = { version = "0.4.22", features = ["release_max_level_error"] }


@ -741,35 +741,32 @@ impl Codegen {
fields: &[CommentOr<StructField>],
) -> ty::Struct {
let sym = pos.map(|pos| SymKey::Struct(file, pos));
if let Some(sym) = sym
&& let Some(&ty) = self.tys.syms.get(sym, &self.tys.ins)
if let Some(ref sym) = sym
&& let Some(&ty) = self.tys.syms.get(sym)
{
return ty.expand().inner();
}
let prev_tmp = self.tys.tmp.fields.len();
let prev_tmp = self.tys.fields_tmp.len();
for sf in fields.iter().filter_map(CommentOr::or) {
let f = Field { name: self.tys.names.intern(sf.name), ty: self.ty(&sf.ty) };
self.tys.tmp.fields.push(f);
let f = Field { name: self.tys.field_names.intern(sf.name), ty: self.ty(&sf.ty) };
self.tys.fields_tmp.push(f);
}
self.tys.ins.structs.push(Struct {
field_start: self.tys.ins.fields.len() as _,
pos: pos.unwrap_or(Pos::MAX),
self.tys.structs.push(Struct {
field_start: self.tys.fields.len() as _,
explicit_alignment,
file,
..Default::default()
});
self.tys.ins.fields.extend(self.tys.tmp.fields.drain(prev_tmp..));
self.tys.fields.extend(self.tys.fields_tmp.drain(prev_tmp..));
if let Some(sym) = sym {
self.tys.syms.insert(
sym,
ty::Kind::Struct(self.tys.ins.structs.len() as u32 - 1).compress(),
&self.tys.ins,
);
self.tys
.syms
.insert(sym, ty::Kind::Struct(self.tys.structs.len() as u32 - 1).compress());
}
self.tys.ins.structs.len() as u32 - 1
self.tys.structs.len() as u32 - 1
}
fn expr_ctx(&mut self, expr: &Expr, mut ctx: Ctx) -> Option<Value> {
@ -840,13 +837,13 @@ impl Codegen {
_ = self.assert_ty(index.pos(), index_val.ty, ty::Id::INT, "subsctipt");
if let ty::Kind::Ptr(ty) = base_val.ty.expand() {
base_val.ty = self.tys.ins.ptrs[ty as usize].base;
base_val.ty = self.tys.ptrs[ty as usize].base;
base_val.loc = base_val.loc.into_derefed();
}
match base_val.ty.expand() {
ty::Kind::Slice(arr) => {
let ty = self.tys.ins.arrays[arr as usize].ty;
let ty = self.tys.arrays[arr as usize].ty;
let item_size = self.tys.size_of(ty);
let Loc::Rt { derefed: true, ref mut reg, ref stack, offset } =
@ -887,7 +884,7 @@ impl Codegen {
self.report(func_ast.pos(), "first argument of inline needs to be a function");
};
let fuc = &self.tys.ins.funcs[func as usize];
let fuc = &self.tys.funcs[func as usize];
let fast = self.files[fuc.file as usize].clone();
let E::BinOp { right: &E::Closure { args: cargs, body, .. }, .. } =
fuc.expr.get(&fast).unwrap()
@ -902,7 +899,7 @@ impl Codegen {
if scope == self.ci.vars.len() {
for ((arg, ti), carg) in args.iter().zip(sig.args.range()).zip(cargs) {
let ty = self.tys.ins.args[ti];
let ty = self.tys.args[ti];
let loc = self.expr_ctx(arg, Ctx::default().with_ty(ty))?.loc;
self.ci.vars.push(Variable { id: carg.id, value: Value { ty, loc } });
}
@ -912,7 +909,7 @@ impl Codegen {
let loc = self.alloc_ret(sig.ret, ctx, true);
let prev_ret_reg = core::mem::replace(&mut self.ci.inline_ret_loc, loc);
let fuc = &self.tys.ins.funcs[func as usize];
let fuc = &self.tys.funcs[func as usize];
let prev_file = core::mem::replace(&mut self.ci.file, fuc.file);
let prev_ret = core::mem::replace(&mut self.ci.ret, Some(sig.ret));
self.expr(body);
@ -1126,8 +1123,8 @@ impl Codegen {
}
let reloc = Reloc::new(self.ci.code.len() as _, 3, 4);
let glob = self.tys.ins.globals.len() as ty::Global;
self.tys.ins.globals.push(Global { data: str, ..Default::default() });
let glob = self.tys.globals.len() as ty::Global;
self.tys.globals.push(Global { data: str, ..Default::default() });
self.ci
.relocs
.push(TypedReloc { target: ty::Kind::Global(glob).compress(), reloc });
@ -1176,7 +1173,7 @@ impl Codegen {
}
}
ty::Kind::Slice(arr) => {
let arr = self.tys.ins.arrays[arr as usize];
let arr = self.tys.arrays[arr as usize];
let item_size = self.tys.size_of(arr.ty);
for (i, value) in fields.iter().enumerate() {
let loc = loc.as_ref().offset(i as u32 * item_size);
@ -1207,7 +1204,7 @@ impl Codegen {
let mut tal = self.expr(target)?;
if let ty::Kind::Ptr(ty) = tal.ty.expand() {
tal.ty = self.tys.ins.ptrs[ty as usize].base;
tal.ty = self.tys.ptrs[ty as usize].base;
tal.loc = tal.loc.into_derefed();
}
@ -1308,7 +1305,7 @@ impl Codegen {
let val = self.expr(val)?;
match val.ty.expand() {
ty::Kind::Ptr(ty) => Some(Value {
ty: self.tys.ins.ptrs[ty as usize].base,
ty: self.tys.ptrs[ty as usize].base,
loc: Loc::reg(self.loc_to_reg(val.loc, self.tys.size_of(val.ty)))
.into_derefed(),
}),
@ -1350,7 +1347,7 @@ impl Codegen {
//self.output.trunc(&snap);
self.ci.vars.truncate(scope);
let fuc = &self.tys.ins.funcs[func as usize];
let fuc = &self.tys.funcs[func as usize];
let ast = self.files[fuc.file as usize].clone();
let E::BinOp { right: &E::Closure { args: cargs, .. }, .. } =
fuc.expr.get(&ast).unwrap()
@ -1366,7 +1363,7 @@ impl Codegen {
self.assert_arg_count(expr.pos(), args.len(), cargs.len(), "function call");
for (i, (arg, carg)) in args.iter().zip(cargs).enumerate() {
let ty = self.tys.ins.args[sig_args.next().unwrap()];
let ty = self.tys.args[sig_args.next().unwrap()];
let sym = parser::find_symbol(&ast.symbols, carg.id);
if sym.flags & idfl::COMPTIME != 0 {
sig_args.next().unwrap();
@ -1642,7 +1639,7 @@ impl Codegen {
if matches!(op, T::Add | T::Sub)
&& let ty::Kind::Ptr(ty) = ty::Kind::from_ty(ty)
{
let size = self.tys.size_of(self.tys.ins.ptrs[ty as usize].base);
let size = self.tys.size_of(self.tys.ptrs[ty as usize].base);
imm *= size as u64;
}
@ -1678,7 +1675,7 @@ impl Codegen {
let ty::Kind::Ptr(ty) = ty.expand() else { unreachable!() };
let size = self.tys.size_of(self.tys.ins.ptrs[ty as usize].base);
let size = self.tys.size_of(self.tys.ptrs[ty as usize].base);
self.ci.emit(muli64(offset, offset, size as _));
}
}
@ -1743,7 +1740,7 @@ impl Codegen {
}
fn compute_signature(&mut self, func: &mut ty::Func, pos: Pos, args: &[Expr]) -> Option<Sig> {
let fuc = &self.tys.ins.funcs[*func as usize];
let fuc = &self.tys.funcs[*func as usize];
let fast = self.files[fuc.file as usize].clone();
let Expr::BinOp { right: &Expr::Closure { args: cargs, ret, .. }, .. } =
fuc.expr.get(&fast).unwrap()
@ -1754,11 +1751,11 @@ impl Codegen {
Some(if let Some(sig) = fuc.sig {
sig
} else {
let arg_base = self.tys.ins.args.len();
let arg_base = self.tys.args.len();
for (arg, carg) in args.iter().zip(cargs) {
let ty = self.ty(&carg.ty);
self.tys.ins.args.push(ty);
self.tys.args.push(ty);
let sym = parser::find_symbol(&fast.symbols, carg.id);
let loc = if sym.flags & idfl::COMPTIME == 0 {
// FIXME: could fuck us
@ -1770,7 +1767,7 @@ impl Codegen {
"TODO: we dont support anything except type generics"
);
let arg = self.expr_ctx(arg, Ctx::default().with_ty(ty))?;
self.tys.ins.args.push(arg.loc.to_ty().unwrap());
self.tys.args.push(arg.loc.to_ty().unwrap());
arg.loc
};
@ -1781,13 +1778,11 @@ impl Codegen {
let ret = self.ty(ret);
let sym = SymKey::FuncInst(*func, args);
let ct = |ins: &mut crate::TypeIns| {
let func_id = ins.funcs.len();
let fuc = &ins.funcs[*func as usize];
ins.funcs.push(Func {
let ct = || {
let func_id = self.tys.funcs.len();
let fuc = &self.tys.funcs[*func as usize];
self.tys.funcs.push(Func {
file: fuc.file,
name: fuc.name,
base: Some(*func),
sig: Some(Sig { args, ret }),
expr: fuc.expr,
..Default::default()
@ -1795,7 +1790,7 @@ impl Codegen {
ty::Kind::Func(func_id as _).compress()
};
*func = self.tys.syms.get_or_insert(sym, &mut self.tys.ins, ct).expand().inner();
*func = self.tys.syms.entry(sym).or_insert_with(ct).expand().inner();
Sig { args, ret }
})
@ -1959,7 +1954,7 @@ impl Codegen {
}
}
ty::Kind::Slice(arr) => {
let arr = &self.tys.ins.arrays[arr as usize];
let arr = &self.tys.arrays[arr as usize];
if arr.len == ArrayLen::MAX {
ty = self.tys.make_array(arr.ty, field_len as _);
} else if arr.len != field_len as u32 {
@ -2048,7 +2043,7 @@ impl Codegen {
let ptr = self.ci.regs.allocate();
let reloc = Reloc::new(self.ci.code.len(), 3, 4);
let global = &mut self.tys.ins.globals[id as usize];
let global = &mut self.tys.globals[id as usize];
self.ci.relocs.push(TypedReloc { target: ty::Kind::Global(id).compress(), reloc });
self.ci.emit(instrs::lra(ptr.get(), 0, 0));
@ -2089,7 +2084,7 @@ impl Codegen {
}
fn handle_task(&mut self, FTask { file, id }: FTask) {
let func = &self.tys.ins.funcs[id as usize];
let func = &self.tys.funcs[id as usize];
debug_assert!(func.file == file);
let sig = func.sig.unwrap();
let ast = self.files[file as usize].clone();
@ -2119,10 +2114,10 @@ impl Codegen {
let mut parama = self.tys.parama(sig.ret);
let mut sig_args = sig.args.range();
for arg in args.iter() {
let ty = self.tys.ins.args[sig_args.next().unwrap()];
let ty = self.tys.args[sig_args.next().unwrap()];
let sym = parser::find_symbol(&ast.symbols, arg.id).flags;
let loc = match sym & idfl::COMPTIME != 0 {
true => Loc::ty(self.tys.ins.args[sig_args.next().unwrap()]),
true => Loc::ty(self.tys.args[sig_args.next().unwrap()]),
false => self.load_arg(sym, ty, &mut parama),
};
self.ci.vars.push(Variable { id: arg.id, value: Value { ty, loc } });
@ -2149,8 +2144,8 @@ impl Codegen {
self.ci.finalize();
self.ci.emit(jala(ZERO, RET_ADDR, 0));
self.ci.regs.free(core::mem::take(&mut self.ci.ret_reg));
self.tys.ins.funcs[id as usize].code.append(&mut self.ci.code);
self.tys.ins.funcs[id as usize].relocs.append(&mut self.ci.relocs);
self.tys.funcs[id as usize].code.append(&mut self.ci.code);
self.tys.funcs[id as usize].relocs.append(&mut self.ci.relocs);
self.pool.cis.push(core::mem::replace(&mut self.ci, prev_ci));
self.ct.vm.write_reg(reg::STACK_PTR, ct_stack_base);
}
@ -2428,14 +2423,15 @@ impl Codegen {
.compress();
self.ci.vars.truncate(prev_len);
self.ct.vm.write_reg(1, stru.repr() as u64);
debug_assert_ne!(stru.expand().inner(), 1);
}
trap::Trap::MomizedCall(trap::MomizedCall { func }) => {
if let Some(ty) = self.tys.ins.funcs[func as usize].computed {
let sym = SymKey::MomizedCall(func);
if let Some(&ty) = self.tys.syms.get(&sym) {
self.ct.vm.write_reg(1, ty.repr());
} else {
self.run_vm();
self.tys.ins.funcs[func as usize].computed =
Some(self.ct.vm.read_reg(1).0.into());
self.tys.syms.insert(sym, self.ct.vm.read_reg(1).0.into());
}
code_index += jal(0, 0, 0).0 + tx().0;
}
@ -2467,9 +2463,9 @@ impl Codegen {
};
let key = SymKey::Decl(file, ident);
if let Some(existing) = self.tys.syms.get(key, &self.tys.ins) {
if let Some(existing) = self.tys.syms.get(&key) {
if let ty::Kind::Func(id) = existing.expand()
&& let func = &mut self.tys.ins.funcs[id as usize]
&& let func = &mut self.tys.funcs[id as usize]
&& let Err(idx) = task::unpack(func.offset)
&& idx < self.tasks.len()
{
@ -2483,23 +2479,22 @@ impl Codegen {
let prev_file = core::mem::replace(&mut self.ci.file, file);
let sym = match expr {
Expr::BinOp {
left: &Expr::Ident { id, .. },
left: &Expr::Ident { .. },
op: TokenKind::Decl,
right: &Expr::Closure { pos, args, ret, .. },
} => {
let func = Func {
file,
name: id,
sig: 'b: {
let arg_base = self.tys.ins.args.len();
let arg_base = self.tys.args.len();
for arg in args {
let sym = find_symbol(&self.files[file as usize].symbols, arg.id);
if sym.flags & idfl::COMPTIME != 0 {
self.tys.ins.args.truncate(arg_base);
self.tys.args.truncate(arg_base);
break 'b None;
}
let ty = self.ty(&arg.ty);
self.tys.ins.args.push(ty);
self.tys.args.push(ty);
}
let args = self.pack_args(pos, arg_base);
@ -2515,8 +2510,8 @@ impl Codegen {
..Default::default()
};
let id = self.tys.ins.funcs.len() as _;
self.tys.ins.funcs.push(func);
let id = self.tys.funcs.len() as _;
self.tys.funcs.push(func);
ty::Kind::Func(id)
}
@ -2526,12 +2521,12 @@ impl Codegen {
right: stru @ Expr::Struct { .. },
} => {
let str = self.ty(stru).expand().inner();
self.tys.ins.structs[str as usize].name = id;
self.tys.structs[str as usize].name = id;
ty::Kind::Struct(str)
}
Expr::BinOp { left, op: TokenKind::Decl, right } => {
let gid = self.tys.ins.globals.len() as ty::Global;
self.tys.ins.globals.push(Global { file, name: ident, ..Default::default() });
let gid = self.tys.globals.len() as ty::Global;
self.tys.globals.push(Global { file, name: ident, ..Default::default() });
let ci = ItemCtx {
file,
@ -2540,7 +2535,7 @@ impl Codegen {
};
_ = left.find_pattern_path(ident, right, |expr| {
self.tys.ins.globals[gid as usize] = self
self.tys.globals[gid as usize] = self
.ct_eval(ci, |s, _| Ok::<_, !>(s.generate_global(expr, file, ident)))
.into_ok();
});
@ -2550,12 +2545,12 @@ impl Codegen {
e => unimplemented!("{e:#?}"),
};
self.ci.file = prev_file;
self.tys.syms.insert(key, sym.compress(), &self.tys.ins);
self.tys.syms.insert(key, sym.compress());
sym
}
fn make_func_reachable(&mut self, func: ty::Func) {
let fuc = &mut self.tys.ins.funcs[func as usize];
let fuc = &mut self.tys.funcs[func as usize];
if fuc.offset == u32::MAX {
fuc.offset = task::id(self.tasks.len() as _);
self.tasks.push(Some(FTask { file: fuc.file, id: func }));
@ -2585,7 +2580,7 @@ impl Codegen {
self.ci.free_loc(ret.loc);
Global { ty: ret.ty, file, name, data, ..Default::default() }
Global { ty: ret.ty, file, name, data, ast: ExprRef::new(expr), ..Default::default() }
}
fn ct_eval<T, E>(
@ -2609,11 +2604,11 @@ impl Codegen {
}
if ret.is_ok() {
let last_fn = self.tys.ins.funcs.len();
self.tys.ins.funcs.push(Default::default());
let last_fn = self.tys.funcs.len();
self.tys.funcs.push(Default::default());
self.tys.ins.funcs[last_fn].code = core::mem::take(&mut self.ci.code);
self.tys.ins.funcs[last_fn].relocs = core::mem::take(&mut self.ci.relocs);
self.tys.funcs[last_fn].code = core::mem::take(&mut self.ci.code);
self.tys.funcs[last_fn].relocs = core::mem::take(&mut self.ci.relocs);
if is_on_stack {
let size =
@ -2624,8 +2619,7 @@ impl Codegen {
}
self.tys.dump_reachable(last_fn as _, &mut self.ct.code);
let entry =
&mut self.ct.code[self.tys.ins.funcs[last_fn].offset as usize] as *mut _ as _;
let entry = &mut self.ct.code[self.tys.funcs[last_fn].offset as usize] as *mut _ as _;
let prev_pc = core::mem::replace(&mut self.ct.vm.pc, hbvm::mem::Address::new(entry))
- self.ct.code.as_ptr() as usize;
@ -2646,7 +2640,7 @@ impl Codegen {
self.run_vm();
self.ct.vm.pc = prev_pc + self.ct.code.as_ptr() as usize;
let func = self.tys.ins.funcs.pop().unwrap();
let func = self.tys.funcs.pop().unwrap();
self.ci.code = func.code;
self.ci.code.clear();
self.ci.relocs = func.relocs;
@ -2735,15 +2729,15 @@ impl Codegen {
}
fn pack_args(&mut self, pos: Pos, arg_base: usize) -> ty::Tuple {
let needle = &self.tys.ins.args[arg_base..];
let needle = &self.tys.args[arg_base..];
if needle.is_empty() {
return ty::Tuple::empty();
}
let len = needle.len();
// FIXME: maybe later when this becomes a bottleneck we use more
// efficient search (SIMD?, indexing?)
let sp = self.tys.ins.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.tys.ins.args.truncate((sp + needle.len()).max(arg_base));
let sp = self.tys.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.tys.args.truncate((sp + needle.len()).max(arg_base));
ty::Tuple::new(sp, len)
.unwrap_or_else(|| self.report(pos, "amount of arguments not supported"))
}
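
The pack_args hunk above only renames the container (tys.ins.args becomes tys.args); the underlying trick deduplicates a freshly pushed run of type ids by searching for an existing window in the flat args arena and truncating the duplicate suffix. A standalone sketch of the idea, with illustrative names and u32 standing in for ty::Id:

fn pack_args(args: &mut Vec<u32>, arg_base: usize) -> (usize, usize) {
    // The run that was just pushed for the current signature.
    let needle = &args[arg_base..];
    if needle.is_empty() {
        return (0, 0);
    }
    let len = needle.len();
    // Linear scan for an earlier occurrence of the same run; the FIXME in the
    // real code notes this could be made faster if it ever becomes a bottleneck.
    let start = args.windows(len).position(|w| w == needle).unwrap();
    // Keep everything up to the end of the matched window; if the only match
    // is the run we just pushed, nothing is truncated.
    args.truncate((start + len).max(arg_base));
    (start, len)
}

Pushing an identical run a second time finds the earlier window, drops the duplicate suffix, and returns the original start index, so equal argument lists share one slice of the arena.
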
@ -2759,8 +2753,8 @@ impl Codegen {
}
pub fn assemble(&mut self, buf: &mut Vec<u8>) {
self.tys.ins.funcs.iter_mut().for_each(|f| f.offset = u32::MAX);
self.tys.ins.globals.iter_mut().for_each(|g| g.offset = u32::MAX);
self.tys.funcs.iter_mut().for_each(|f| f.offset = u32::MAX);
self.tys.globals.iter_mut().for_each(|g| g.offset = u32::MAX);
self.tys.assemble(buf)
}
}
@ -2770,9 +2764,6 @@ mod tests {
use alloc::{string::String, vec::Vec};
fn generate(ident: &'static str, input: &'static str, output: &mut String) {
_ = log::set_logger(&crate::fs::Logger);
log::set_max_level(log::LevelFilter::Error);
let mut codegen =
super::Codegen { files: crate::test_parse_files(ident, input), ..Default::default() };


@ -36,7 +36,7 @@ use {
ty::ArrayLen,
},
alloc::{collections::BTreeMap, string::String, vec::Vec},
core::{cell::Cell, fmt::Display, ops::Range},
core::{cell::Cell, fmt::Display, hash::BuildHasher, ops::Range, usize},
hashbrown::hash_map,
hbbytecode as instrs,
};
@ -69,114 +69,6 @@ pub mod son;
mod lexer;
mod vc;
mod ctx_map {
use core::hash::BuildHasher;
pub type Hash = u64;
pub type HashBuilder = core::hash::BuildHasherDefault<IdentityHasher>;
#[derive(Default)]
pub struct IdentityHasher(u64);
impl core::hash::Hasher for IdentityHasher {
fn finish(&self) -> u64 {
self.0
}
fn write(&mut self, _: &[u8]) {
unimplemented!()
}
fn write_u64(&mut self, i: u64) {
self.0 = i;
}
}
pub struct Key<T> {
pub value: T,
pub hash: Hash,
}
impl<T> core::hash::Hash for Key<T> {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.hash);
}
}
pub trait CtxEntry {
type Ctx: ?Sized;
type Key<'a>: Eq + core::hash::Hash;
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a>;
}
pub struct CtxMap<T> {
inner: hashbrown::HashMap<Key<T>, (), HashBuilder>,
}
impl<T> Default for CtxMap<T> {
fn default() -> Self {
Self { inner: Default::default() }
}
}
impl<T: CtxEntry> CtxMap<T> {
pub fn entry<'a, 'b>(
&'a mut self,
key: T::Key<'b>,
ctx: &'b T::Ctx,
) -> (hashbrown::hash_map::RawEntryMut<'a, Key<T>, (), HashBuilder>, Hash) {
let hash = crate::FnvBuildHasher::default().hash_one(&key);
(self.inner.raw_entry_mut().from_hash(hash, |k| k.value.key(ctx) == key), hash)
}
pub fn get<'a>(&self, key: T::Key<'a>, ctx: &'a T::Ctx) -> Option<&T> {
let hash = crate::FnvBuildHasher::default().hash_one(&key);
self.inner
.raw_entry()
.from_hash(hash, |k| k.value.key(ctx) == key)
.map(|(k, _)| &k.value)
}
pub fn clear(&mut self) {
self.inner.clear();
}
pub fn remove(&mut self, value: &T, ctx: &T::Ctx) -> Option<T> {
let (entry, _) = self.entry(value.key(ctx), ctx);
match entry {
hashbrown::hash_map::RawEntryMut::Occupied(o) => Some(o.remove_entry().0.value),
hashbrown::hash_map::RawEntryMut::Vacant(_) => None,
}
}
pub fn insert<'a>(&mut self, key: T::Key<'a>, value: T, ctx: &'a T::Ctx) {
let (entry, hash) = self.entry(key, ctx);
match entry {
hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
hashbrown::hash_map::RawEntryMut::Vacant(v) => {
_ = v.insert(Key { hash, value }, ())
}
}
}
pub fn get_or_insert<'a>(
&mut self,
key: T::Key<'a>,
ctx: &'a mut T::Ctx,
with: impl FnOnce(&'a mut T::Ctx) -> T,
) -> &mut T {
let (entry, hash) = self.entry(key, unsafe { &mut *(&mut *ctx as *mut _) });
match entry {
hashbrown::hash_map::RawEntryMut::Occupied(o) => &mut o.into_key_value().0.value,
hashbrown::hash_map::RawEntryMut::Vacant(v) => {
&mut v.insert(Key { hash, value: with(ctx) }, ()).0.value
}
}
}
}
}
mod task {
use super::Offset;
@ -230,9 +122,9 @@ mod ty {
crate::{
ident,
lexer::TokenKind,
parser::{self, Pos},
parser::{self},
},
core::{num::NonZeroU32, ops::Range},
core::{num::NonZeroU32, ops::Range, usize},
};
pub type ArrayLen = u32;
@ -261,6 +153,10 @@ mod ty {
Some(Self((pos << Self::LEN_BITS | len) as u32))
}
//pub fn view(self, slice: &[Id]) -> &[Id] {
// &slice[self.0 as usize >> Self::LEN_BITS..][..self.len()]
//}
pub fn range(self) -> Range<usize> {
let start = self.0 as usize >> Self::LEN_BITS;
start..start + self.len()
@ -273,41 +169,15 @@ mod ty {
pub fn empty() -> Self {
Self(0)
}
pub fn repr(&self) -> u32 {
self.0
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Id(NonZeroU32);
impl crate::ctx_map::CtxEntry for Id {
type Ctx = crate::TypeIns;
type Key<'a> = crate::SymKey<'a>;
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
match self.expand() {
Kind::Struct(s) => {
let st = &ctx.structs[s as usize];
debug_assert_ne!(st.pos, Pos::MAX);
crate::SymKey::Struct(st.file, st.pos)
}
Kind::Ptr(p) => crate::SymKey::Pointer(&ctx.ptrs[p as usize]),
Kind::Func(f) => {
let fc = &ctx.funcs[f as usize];
if let Some(base) = fc.base {
crate::SymKey::FuncInst(base, fc.sig.unwrap().args)
} else {
crate::SymKey::Decl(fc.file, fc.name)
}
}
Kind::Global(g) => {
let gb = &ctx.globals[g as usize];
crate::SymKey::Decl(gb.file, gb.name)
}
Kind::Slice(s) => crate::SymKey::Array(&ctx.arrays[s as usize]),
Kind::Module(_) | Kind::Builtin(_) => crate::SymKey::Decl(u32::MAX, u32::MAX),
}
}
}
impl Default for Id {
fn default() -> Self {
Self(unsafe { NonZeroU32::new_unchecked(UNDECLARED) })
@ -517,10 +387,10 @@ mod ty {
TK::Module(idx) => write!(f, "@use({:?})[{}]", self.files[idx as usize].path, idx),
TK::Builtin(ty) => write!(f, "{}", to_str(ty)),
TK::Ptr(ty) => {
write!(f, "^{}", self.rety(self.tys.ins.ptrs[ty as usize].base))
write!(f, "^{}", self.rety(self.tys.ptrs[ty as usize].base))
}
TK::Struct(idx) => {
let record = &self.tys.ins.structs[idx as usize];
let record = &self.tys.structs[idx as usize];
if ident::is_null(record.name) {
write!(f, "[{idx}]{{")?;
for (i, &super::Field { name, ty }) in
@ -529,7 +399,12 @@ mod ty {
if i != 0 {
write!(f, ", ")?;
}
write!(f, "{}: {}", self.tys.names.ident_str(name), self.rety(ty))?;
write!(
f,
"{}: {}",
self.tys.field_names.ident_str(name),
self.rety(ty)
)?;
}
write!(f, "}}")
} else {
@ -540,7 +415,7 @@ mod ty {
TK::Func(idx) => write!(f, "fn{idx}"),
TK::Global(idx) => write!(f, "global{idx}"),
TK::Slice(idx) => {
let array = self.tys.ins.arrays[idx as usize];
let array = self.tys.arrays[idx as usize];
match array.len {
ArrayLen::MAX => write!(f, "[{}]", self.rety(array.ty)),
len => write!(f, "[{}; {len}]", self.rety(array.ty)),
@ -567,13 +442,15 @@ fn emit(out: &mut Vec<u8>, (len, instr): EncodedInstr) {
out.extend_from_slice(&instr[..len]);
}
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub enum SymKey<'a> {
Pointer(&'a Ptr),
#[derive(PartialEq, Eq, Hash, Debug)]
enum SymKey {
Pointer(ty::Id),
Struct(FileId, Pos),
FuncInst(ty::Func, ty::Tuple),
MomizedCall(ty::Func),
Decl(FileId, Ident),
Array(&'a Array),
Slice(ty::Id),
Array(ty::Id, ArrayLen),
}
#[derive(Clone, Copy)]
@ -584,9 +461,6 @@ struct Sig {
struct Func {
file: FileId,
name: Ident,
base: Option<ty::Func>,
computed: Option<ty::Id>,
expr: ExprRef,
sig: Option<Sig>,
offset: Offset,
@ -599,9 +473,6 @@ impl Default for Func {
fn default() -> Self {
Self {
file: u32::MAX,
name: 0,
base: None,
computed: None,
expr: Default::default(),
sig: None,
offset: u32::MAX,
@ -620,6 +491,7 @@ struct Global {
file: FileId,
name: Ident,
ty: ty::Id,
ast: ExprRef,
offset: Offset,
data: Vec<u8>,
}
@ -630,6 +502,7 @@ impl Default for Global {
ty: Default::default(),
offset: u32::MAX,
data: Default::default(),
ast: ExprRef::default(),
file: u32::MAX,
name: u32::MAX,
}
@ -671,7 +544,6 @@ struct Field {
#[derive(Default)]
struct Struct {
name: Ident,
pos: Pos,
file: FileId,
size: Cell<Size>,
align: Cell<u8>,
@ -679,13 +551,12 @@ struct Struct {
field_start: u32,
}
#[derive(PartialEq, Eq, Hash)]
pub struct Ptr {
struct Ptr {
base: ty::Id,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Array {
#[derive(Clone, Copy)]
struct Array {
ty: ty::Id,
len: ArrayLen,
}
@ -715,30 +586,56 @@ struct AbleOsExecutableHeader {
metadata_length: u64,
}
impl ctx_map::CtxEntry for Ident {
type Ctx = str;
type Key<'a> = &'a str;
struct IdentEntry {
hash: u32,
ident: Ident,
}
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
unsafe { ctx.get_unchecked(ident::range(*self)) }
impl core::hash::Hash for IdentEntry {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
state.write_u64((self.hash as u64) << 32);
}
}
#[derive(Default)]
struct IdentityHasher(u64);
impl core::hash::Hasher for IdentityHasher {
fn finish(&self) -> u64 {
self.0
}
fn write(&mut self, _: &[u8]) {
unimplemented!()
}
fn write_u64(&mut self, i: u64) {
self.0 = i;
}
}
#[derive(Default)]
struct IdentInterner {
lookup: ctx_map::CtxMap<Ident>,
lookup: hashbrown::HashMap<IdentEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>,
strings: String,
}
impl IdentInterner {
fn intern(&mut self, ident: &str) -> Ident {
let (entry, hash) = self.lookup.entry(ident, &self.strings);
match entry {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
let hash = FnvBuildHasher::default().hash_one(ident) & 0xFFFFFFFF00000000;
match self.lookup.raw_entry_mut().from_hash(
hash,
|k| unsafe { self.strings.get_unchecked(ident::range(k.ident)) } == ident,
) {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.ident,
hash_map::RawEntryMut::Vacant(v) => {
let id = ident::new(self.strings.len() as _, ident.len() as _);
self.strings.push_str(ident);
v.insert(ctx_map::Key { hash, value: id }, ());
v.insert_hashed_nocheck(
hash,
IdentEntry { hash: (hash >> 32) as _, ident: id },
(),
);
id
}
}
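
The new interner above (and the node lookup in the last file) both rely on the same hashbrown raw-entry pattern: each key carries a hash computed up front with a real hash function, the map's own hasher is a pass-through that merely replays that hash, and equality is checked against shared context (here the strings buffer) instead of storing owned keys. A self-contained sketch of that pattern with illustrative names, assuming hashbrown 0.14 where the raw entry API is available without a feature flag:

use core::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use hashbrown::hash_map::RawEntryMut;

// Pass-through hasher: the map never hashes keys itself, it just replays
// the u64 that was computed up front.
#[derive(Default)]
struct IdentityHasher(u64);

impl Hasher for IdentityHasher {
    fn finish(&self) -> u64 { self.0 }
    fn write(&mut self, _: &[u8]) { unimplemented!("only write_u64 is ever fed in") }
    fn write_u64(&mut self, i: u64) { self.0 = i; }
}

// Each entry remembers its precomputed hash; equality must be checked
// against the shared strings buffer, which is why lookups go through
// raw_entry with an explicit comparison closure instead of Eq on the key.
struct Entry { hash: u64, range: core::ops::Range<usize> }

impl Hash for Entry {
    fn hash<H: Hasher>(&self, state: &mut H) { state.write_u64(self.hash); }
}

#[derive(Default)]
struct Interner {
    lookup: hashbrown::HashMap<Entry, (), BuildHasherDefault<IdentityHasher>>,
    strings: String,
}

impl Interner {
    fn intern(&mut self, ident: &str) -> core::ops::Range<usize> {
        // Any ordinary hasher works here; only the resulting u64 is stored.
        let hash = BuildHasherDefault::<DefaultHasher>::default().hash_one(ident);
        let strings = &self.strings;
        match self.lookup.raw_entry_mut().from_hash(hash, |e| &strings[e.range.clone()] == ident) {
            RawEntryMut::Occupied(o) => o.get_key_value().0.range.clone(),
            RawEntryMut::Vacant(v) => {
                let range = self.strings.len()..self.strings.len() + ident.len();
                self.strings.push_str(ident);
                v.insert_hashed_nocheck(hash, Entry { hash, range: range.clone() }, ());
                range
            }
        }
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("len");
    let b = interner.intern("len");
    assert_eq!(a, b); // same string, same interned range
    assert_eq!(interner.strings, "len"); // stored only once
}

The diff's IdentEntry additionally keeps only the top 32 bits of the FNV hash, but the map mechanics are the same.
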
@ -749,52 +646,49 @@ impl IdentInterner {
}
fn project(&self, ident: &str) -> Option<Ident> {
self.lookup.get(ident, &self.strings).copied()
let hash = FnvBuildHasher::default().hash_one(ident) & 0xFFFFFFFF00000000;
self.lookup
.raw_entry()
.from_hash(
hash,
|k| unsafe { self.strings.get_unchecked(ident::range(k.ident)) } == ident,
)
.map(|(k, _)| k.ident)
}
}
#[derive(Default)]
struct TypesTmp {
fields: Vec<Field>,
frontier: Vec<ty::Id>,
globals: Vec<ty::Global>,
funcs: Vec<ty::Func>,
}
#[derive(Default)]
pub struct TypeIns {
struct Types {
syms: HashMap<SymKey, ty::Id>,
funcs: Vec<Func>,
args: Vec<ty::Id>,
globals: Vec<Global>,
structs: Vec<Struct>,
fields: Vec<Field>,
field_names: IdentInterner,
ptrs: Vec<Ptr>,
arrays: Vec<Array>,
}
#[derive(Default)]
struct Types {
syms: ctx_map::CtxMap<ty::Id>,
names: IdentInterner,
ins: TypeIns,
tmp: TypesTmp,
fields_tmp: Vec<Field>,
frontier_tmp: Vec<ty::Id>,
reachable_globals: Vec<ty::Global>,
reachable_funcs: Vec<ty::Func>,
}
const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
impl Types {
fn struct_field_range(&self, strct: ty::Struct) -> Range<usize> {
let start = self.ins.structs[strct as usize].field_start as usize;
let start = self.structs[strct as usize].field_start as usize;
let end = self
.ins
.structs
.get(strct as usize + 1)
.map_or(self.ins.fields.len(), |s| s.field_start as usize);
.map_or(self.fields.len(), |s| s.field_start as usize);
start..end
}
fn struct_fields(&self, strct: ty::Struct) -> &[Field] {
&self.ins.fields[self.struct_field_range(strct)]
&self.fields[self.struct_field_range(strct)]
}
fn find_type(
@ -804,10 +698,10 @@ impl Types {
files: &[parser::Ast],
) -> Option<ty::Id> {
if let Ok(id) = id
&& let Some(&ty) = self.syms.get(SymKey::Decl(file, id), &self.ins)
&& let Some(&ty) = self.syms.get(&SymKey::Decl(file, id))
{
if let ty::Kind::Global(g) = ty.expand() {
let g = &self.ins.globals[g as usize];
let g = &self.globals[g as usize];
if g.ty == ty::Id::TYPE {
return Some(ty::Id::from(
u32::from_ne_bytes(*g.data.first_chunk().unwrap()) as u64
@ -825,10 +719,10 @@ impl Types {
.find_pattern_path(name, right, |right| self.ty(file, right, files))
.unwrap_or_else(|_| unreachable!())?;
if let ty::Kind::Struct(s) = ty.expand() {
self.ins.structs[s as usize].name = name;
self.structs[s as usize].name = name;
}
self.syms.insert(SymKey::Decl(file, name), ty, &self.ins);
self.syms.insert(SymKey::Decl(file, name), ty);
Some(ty)
}
@ -859,30 +753,29 @@ impl Types {
}
Expr::Struct { pos, fields, packed, .. } => {
let sym = SymKey::Struct(file, pos);
if let Some(&ty) = self.syms.get(sym, &self.ins) {
if let Some(&ty) = self.syms.get(&sym) {
return Some(ty);
}
let prev_tmp = self.tmp.fields.len();
let prev_tmp = self.fields_tmp.len();
for field in fields.iter().filter_map(CommentOr::or) {
let Some(ty) = self.ty(file, &field.ty, files) else {
self.tmp.fields.truncate(prev_tmp);
self.fields_tmp.truncate(prev_tmp);
return None;
};
self.tmp.fields.push(Field { name: self.names.intern(field.name), ty });
self.fields_tmp.push(Field { name: self.field_names.intern(field.name), ty });
}
self.ins.structs.push(Struct {
self.structs.push(Struct {
file,
pos,
field_start: self.ins.fields.len() as _,
field_start: self.fields.len() as _,
explicit_alignment: packed.then_some(1),
..Default::default()
});
self.ins.fields.extend(self.tmp.fields.drain(prev_tmp..));
self.fields.extend(self.fields_tmp.drain(prev_tmp..));
let ty = ty::Kind::Struct(self.ins.structs.len() as u32 - 1).compress();
self.syms.insert(sym, ty, &self.ins);
let ty = ty::Kind::Struct(self.structs.len() as u32 - 1).compress();
self.syms.insert(sym, ty);
ty
}
_ => return None,
@ -895,62 +788,62 @@ impl Types {
emit(to, instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
emit(to, instrs::tx());
let exe = self.dump_reachable(0, to);
Reloc::new(HEADER_SIZE, 3, 4).apply_jump(to, self.ins.funcs[0].offset, 0);
Reloc::new(HEADER_SIZE, 3, 4).apply_jump(to, self.funcs[0].offset, 0);
unsafe { *to.as_mut_ptr().cast::<AbleOsExecutableHeader>() = exe }
}
fn dump_reachable(&mut self, from: ty::Func, to: &mut Vec<u8>) -> AbleOsExecutableHeader {
debug_assert!(self.tmp.frontier.is_empty());
debug_assert!(self.tmp.funcs.is_empty());
debug_assert!(self.tmp.globals.is_empty());
debug_assert!(self.frontier_tmp.is_empty());
debug_assert!(self.reachable_funcs.is_empty());
debug_assert!(self.reachable_globals.is_empty());
self.tmp.frontier.push(ty::Kind::Func(from).compress());
while let Some(itm) = self.tmp.frontier.pop() {
self.frontier_tmp.push(ty::Kind::Func(from).compress());
while let Some(itm) = self.frontier_tmp.pop() {
match itm.expand() {
ty::Kind::Func(func) => {
let fuc = &mut self.ins.funcs[func as usize];
let fuc = &mut self.funcs[func as usize];
if task::is_done(fuc.offset) {
continue;
}
fuc.offset = 0;
self.tmp.funcs.push(func);
self.tmp.frontier.extend(fuc.relocs.iter().map(|r| r.target));
self.reachable_funcs.push(func);
self.frontier_tmp.extend(fuc.relocs.iter().map(|r| r.target));
}
ty::Kind::Global(glob) => {
let glb = &mut self.ins.globals[glob as usize];
let glb = &mut self.globals[glob as usize];
if task::is_done(glb.offset) {
continue;
}
glb.offset = 0;
self.tmp.globals.push(glob);
self.reachable_globals.push(glob);
}
_ => unreachable!(),
}
}
for &func in &self.tmp.funcs {
let fuc = &mut self.ins.funcs[func as usize];
for &func in &self.reachable_funcs {
let fuc = &mut self.funcs[func as usize];
fuc.offset = to.len() as _;
to.extend(&fuc.code);
}
let code_length = to.len();
for global in self.tmp.globals.drain(..) {
let global = &mut self.ins.globals[global as usize];
for global in self.reachable_globals.drain(..) {
let global = &mut self.globals[global as usize];
global.offset = to.len() as _;
to.extend(&global.data);
}
let data_length = to.len() - code_length;
for func in self.tmp.funcs.drain(..) {
let fuc = &self.ins.funcs[func as usize];
for func in self.reachable_funcs.drain(..) {
let fuc = &self.funcs[func as usize];
for rel in &fuc.relocs {
let offset = match rel.target.expand() {
ty::Kind::Func(fun) => self.ins.funcs[fun as usize].offset,
ty::Kind::Global(glo) => self.ins.globals[glo as usize].offset,
ty::Kind::Func(fun) => self.funcs[fun as usize].offset,
ty::Kind::Global(glo) => self.globals[glo as usize].offset,
_ => unreachable!(),
};
rel.reloc.apply_jump(to, offset, fuc.offset);
@ -977,7 +870,6 @@ impl Types {
) -> Result<(), hbbytecode::DisasmError<'a>> {
use instrs::DisasmItem;
let functions = self
.ins
.funcs
.iter()
.filter(|f| task::is_done(f.offset))
@ -995,7 +887,7 @@ impl Types {
};
(f.offset, (name, f.code.len() as u32, DisasmItem::Func))
})
.chain(self.ins.globals.iter().filter(|g| task::is_done(g.offset)).map(|g| {
.chain(self.globals.iter().filter(|g| task::is_done(g.offset)).map(|g| {
let name = if g.file == u32::MAX {
core::str::from_utf8(&g.data).unwrap()
} else {
@ -1017,25 +909,14 @@ impl Types {
}
fn make_ptr_low(&mut self, base: ty::Id) -> ty::Ptr {
let ptr = Ptr { base };
let (entry, hash) = self.syms.entry(SymKey::Pointer(&ptr), &self.ins);
match entry {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
hash_map::RawEntryMut::Vacant(v) => {
self.ins.ptrs.push(ptr);
v.insert(
ctx_map::Key {
value: ty::Kind::Ptr(self.ins.ptrs.len() as u32 - 1).compress(),
hash,
},
(),
)
.0
.value
}
}
.expand()
.inner()
self.syms
.entry(SymKey::Pointer(base))
.or_insert_with(|| {
self.ptrs.push(Ptr { base });
ty::Kind::Ptr(self.ptrs.len() as u32 - 1).compress()
})
.expand()
.inner()
}
fn make_array(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Id {
@ -1044,32 +925,13 @@ impl Types {
fn make_array_low(&mut self, ty: ty::Id, len: ArrayLen) -> ty::Slice {
self.syms
.get_or_insert(SymKey::Array(&Array { ty, len }), &mut self.ins, |ins| {
ins.arrays.push(Array { ty, len });
ty::Kind::Slice(ins.arrays.len() as u32 - 1).compress()
.entry(SymKey::Array(ty, len))
.or_insert_with(|| {
self.arrays.push(Array { ty, len });
ty::Kind::Slice(self.arrays.len() as u32 - 1).compress()
})
.expand()
.inner()
//let array = Array { ty, len };
//let (entry, hash) = self.syms.entry(SymKey::Array(&array), &self.ins);
//match entry {
// hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
// hash_map::RawEntryMut::Vacant(v) => {
// self.ins.arrays.push(array);
// v.insert(
// ctx_map::Key {
// value: ty::Kind::Slice(self.ins.ptrs.len() as u32 - 1).compress(),
// hash,
// },
// (),
// )
// .0
// .value
// }
//}
//.expand()
//.inner()
}
fn size_of(&self, ty: ty::Id) -> Size {
@ -1082,7 +944,7 @@ impl Types {
ty::Kind::Builtin(ty::I16 | ty::U16) => 2,
ty::Kind::Builtin(ty::I8 | ty::U8 | ty::BOOL) => 1,
ty::Kind::Slice(arr) => {
let arr = &self.ins.arrays[arr as usize];
let arr = &self.arrays[arr as usize];
match arr.len {
0 => 0,
ArrayLen::MAX => 16,
@ -1090,13 +952,13 @@ impl Types {
}
}
ty::Kind::Struct(stru) => {
if self.ins.structs[stru as usize].size.get() != 0 {
return self.ins.structs[stru as usize].size.get();
if self.structs[stru as usize].size.get() != 0 {
return self.structs[stru as usize].size.get();
}
let mut oiter = OffsetIter::new(stru, self);
while oiter.next(self).is_some() {}
self.ins.structs[stru as usize].size.set(oiter.offset);
self.structs[stru as usize].size.set(oiter.offset);
oiter.offset
}
ty => unimplemented!("size_of: {:?}", ty),
@ -1106,10 +968,10 @@ impl Types {
fn align_of(&self, ty: ty::Id) -> Size {
match ty.expand() {
ty::Kind::Struct(stru) => {
if self.ins.structs[stru as usize].align.get() != 0 {
return self.ins.structs[stru as usize].align.get() as _;
if self.structs[stru as usize].align.get() != 0 {
return self.structs[stru as usize].align.get() as _;
}
let align = self.ins.structs[stru as usize].explicit_alignment.map_or_else(
let align = self.structs[stru as usize].explicit_alignment.map_or_else(
|| {
self.struct_fields(stru)
.iter()
@ -1119,11 +981,11 @@ impl Types {
},
|a| a as _,
);
self.ins.structs[stru as usize].align.set(align.try_into().unwrap());
self.structs[stru as usize].align.set(align.try_into().unwrap());
align
}
ty::Kind::Slice(arr) => {
let arr = &self.ins.arrays[arr as usize];
let arr = &self.arrays[arr as usize];
match arr.len {
ArrayLen::MAX => 8,
_ => self.align_of(arr.ty),
@ -1135,13 +997,13 @@ impl Types {
fn base_of(&self, ty: ty::Id) -> Option<ty::Id> {
match ty.expand() {
ty::Kind::Ptr(p) => Some(self.ins.ptrs[p as usize].base),
ty::Kind::Ptr(p) => Some(self.ptrs[p as usize].base),
_ => None,
}
}
fn find_struct_field(&self, s: ty::Struct, name: &str) -> Option<usize> {
let name = self.names.project(name)?;
let name = self.field_names.project(name)?;
self.struct_fields(s).iter().position(|f| f.name == name)
}
}
@ -1158,7 +1020,7 @@ impl OffsetIter {
}
fn offset_of(tys: &Types, idx: ty::Struct, field: &str) -> Option<(Offset, ty::Id)> {
let field_id = tys.names.project(field)?;
let field_id = tys.field_names.project(field)?;
OffsetIter::new(idx, tys)
.into_iter(tys)
.find(|(f, _)| f.name == field_id)
@ -1166,8 +1028,8 @@ impl OffsetIter {
}
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a Field, Offset)> {
let stru = &tys.ins.structs[self.strct as usize];
let field = &tys.ins.fields[self.fields.next()?];
let stru = &tys.structs[self.strct as usize];
let field = &tys.fields[self.fields.next()?];
let align = stru.explicit_alignment.map_or_else(|| tys.align_of(field.ty), |a| a as u32);
self.offset = (self.offset + align - 1) & !(align - 1);


@ -1,6 +1,5 @@
use {
crate::{
ctx_map::CtxEntry,
ident::Ident,
instrs,
lexer::{self, TokenKind},
@ -12,7 +11,7 @@ use {
task,
ty::{self},
vc::{BitSet, Vc},
Func, HashMap, Offset, OffsetIter, Reloc, Sig, SymKey, TypedReloc, Types,
Func, HashMap, IdentityHasher, Offset, OffsetIter, Reloc, Sig, SymKey, TypedReloc, Types,
},
alloc::{borrow::ToOwned, string::String, vec::Vec},
core::{
@ -20,10 +19,13 @@ use {
cell::RefCell,
convert::identity,
fmt::{self, Debug, Display, Write},
format_args as fa, mem, ops,
format_args as fa,
hash::{BuildHasher, Hasher},
mem, ops,
},
hashbrown::hash_map,
regalloc2::VReg,
std::process::id,
};
const VOID: Nid = 0;
@ -42,17 +44,19 @@ pub mod reg {
pub type Reg = u8;
}
type Lookup = crate::ctx_map::CtxMap<Nid>;
struct LookupEntry {
nid: Nid,
hash: u64,
}
impl crate::ctx_map::CtxEntry for Nid {
type Ctx = [Result<Node, Nid>];
type Key<'a> = (Kind, &'a [Nid], ty::Id);
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
ctx[*self as usize].as_ref().unwrap().key()
impl core::hash::Hash for LookupEntry {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u64(self.hash);
}
}
type Lookup = hashbrown::HashMap<LookupEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>;
struct Nodes {
values: Vec<Result<Node, Nid>>,
visited: BitSet,
@ -102,10 +106,10 @@ impl Nodes {
let mut lookup_meta = None;
if !node.is_lazy_phi() {
let (raw_entry, hash) = self.lookup.entry(node.key(), &self.values);
let (raw_entry, hash) = Self::find_node(&mut self.lookup, &self.values, &node);
let entry = match raw_entry {
hash_map::RawEntryMut::Occupied(o) => return o.get_key_value().0.value,
hash_map::RawEntryMut::Occupied(o) => return o.get_key_value().0.nid,
hash_map::RawEntryMut::Vacant(v) => v,
};
@ -125,14 +129,38 @@ impl Nodes {
self.free = mem::replace(&mut self.values[free as usize], Ok(node)).unwrap_err();
if let Some((entry, hash)) = lookup_meta {
entry.insert(crate::ctx_map::Key { value: free, hash }, ());
entry.insert(LookupEntry { nid: free, hash }, ());
}
free
}
fn find_node<'a>(
lookup: &'a mut Lookup,
values: &[Result<Node, Nid>],
node: &Node,
) -> (
hash_map::RawEntryMut<'a, LookupEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>,
u64,
) {
let hash = crate::FnvBuildHasher::default().hash_one(node.key());
let entry = lookup
.raw_entry_mut()
.from_hash(hash, |n| values[n.nid as usize].as_ref().unwrap().key() == node.key());
(entry, hash)
}
fn remove_node_lookup(&mut self, target: Nid) {
if !self[target].is_lazy_phi() {
self.lookup.remove(&target, &self.values).unwrap();
match Self::find_node(
&mut self.lookup,
&self.values,
self.values[target as usize].as_ref().unwrap(),
)
.0
{
hash_map::RawEntryMut::Occupied(o) => o.remove(),
hash_map::RawEntryMut::Vacant(_) => unreachable!(),
};
}
}
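
find_node and the LookupEntry map above apply the precomputed-hash pattern to node deduplication (value numbering): the map stores only a node id plus its hash, and equality is checked by reading the candidate node back out of the values arena. A much-simplified sketch of what that buys, using owned keys and illustrative names instead of the arena-backed comparison:

use std::collections::HashMap;

type Nid = u32;

#[derive(Clone, PartialEq, Eq, Hash)]
enum Kind { Add, Mul, Const(i64) }

#[derive(Default)]
struct Nodes {
    values: Vec<(Kind, Vec<Nid>)>,
    // Keyed by (kind, inputs); building the same pure node twice yields the
    // same id instead of a duplicate.
    lookup: HashMap<(Kind, Vec<Nid>), Nid>,
}

impl Nodes {
    fn new_node(&mut self, kind: Kind, inputs: Vec<Nid>) -> Nid {
        if let Some(&nid) = self.lookup.get(&(kind.clone(), inputs.clone())) {
            return nid; // identical node already exists, reuse it
        }
        let nid = self.values.len() as Nid;
        self.values.push((kind.clone(), inputs.clone()));
        self.lookup.insert((kind, inputs), nid);
        nid
    }
}

fn main() {
    let mut nodes = Nodes::default();
    let a = nodes.new_node(Kind::Const(1), vec![]);
    let b = nodes.new_node(Kind::Const(1), vec![]);
    assert_eq!(a, b); // value-numbered: same constant, same node
}

The real code only interns nodes that are not lazy phis, and avoids owning the key by hashing (kind, inputs, ty) up front and comparing through the arena, exactly as in the interner sketch earlier.
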
@ -351,16 +379,20 @@ impl Nodes {
let prev = self[target].inputs[inp_index];
self[target].inputs[inp_index] = with;
let (entry, hash) = self.lookup.entry(target.key(&self.values), &self.values);
let (entry, hash) = Self::find_node(
&mut self.lookup,
&self.values,
self.values[target as usize].as_ref().unwrap(),
);
match entry {
hash_map::RawEntryMut::Occupied(other) => {
let rpl = other.get_key_value().0.value;
let rpl = other.get_key_value().0.nid;
self[target].inputs[inp_index] = prev;
self.replace(target, rpl);
rpl
}
hash_map::RawEntryMut::Vacant(slot) => {
slot.insert(crate::ctx_map::Key { value: target, hash }, ());
slot.insert(LookupEntry { nid: target, hash }, ());
let index = self[prev].outputs.iter().position(|&o| o == target).unwrap();
self[prev].outputs.swap_remove(index);
self[with].outputs.push(target);
@ -813,7 +845,7 @@ impl fmt::Display for Kind {
#[derive(Debug, Default, Clone)]
//#[repr(align(64))]
pub struct Node {
struct Node {
kind: Kind,
inputs: Vc,
outputs: Vc,
@ -1031,7 +1063,7 @@ impl Codegen {
}
fn make_func_reachable(&mut self, func: ty::Func) {
let fuc = &mut self.tys.ins.funcs[func as usize];
let fuc = &mut self.tys.funcs[func as usize];
if fuc.offset == u32::MAX {
fuc.offset = task::id(self.tasks.len() as _);
self.tasks.push(Some(FTask { file: fuc.file, id: func }));
@ -1112,7 +1144,7 @@ impl Codegen {
.tys
.struct_fields(s)
.iter()
.map(|f| self.tys.names.ident_str(f.name))
.map(|f| self.tys.field_names.ident_str(f.name))
.intersperse("', '")
.collect::<String>();
self.report(
@ -1235,7 +1267,7 @@ impl Codegen {
self.make_func_reachable(func);
let fuc = &self.tys.ins.funcs[func as usize];
let fuc = &self.tys.funcs[func as usize];
let sig = fuc.sig.expect("TODO: generic functions");
let ast = self.files[fuc.file as usize].clone();
let Expr::BinOp { right: &Expr::Closure { args: cargs, .. }, .. } =
@ -1257,7 +1289,7 @@ impl Codegen {
let mut inps = Vc::from([self.ci.ctrl]);
for ((arg, carg), tyx) in args.iter().zip(cargs).zip(sig.args.range()) {
let ty = self.tys.ins.args[tyx];
let ty = self.tys.args[tyx];
if self.tys.size_of(ty) == 0 {
continue;
}
@ -1342,7 +1374,7 @@ impl Codegen {
.iter()
.zip(offs)
.filter(|&(_, (ty, _))| ty != ty::Id::UNDECLARED)
.map(|(f, _)| self.tys.names.ident_str(f.name))
.map(|(f, _)| self.tys.field_names.ident_str(f.name))
.intersperse(", ")
.collect::<String>();
@ -1637,7 +1669,7 @@ impl Codegen {
}
fn emit_func(&mut self, FTask { file, id }: FTask) {
let func = &mut self.tys.ins.funcs[id as usize];
let func = &mut self.tys.funcs[id as usize];
func.offset = u32::MAX - 1;
debug_assert!(func.file == file);
let sig = func.sig.unwrap();
@ -1674,7 +1706,7 @@ impl Codegen {
let mut sig_args = sig.args.range();
for (arg, index) in args.iter().zip(0u32..) {
let ty = self.tys.ins.args[sig_args.next().unwrap()];
let ty = self.tys.args[sig_args.next().unwrap()];
let value = self.ci.nodes.new_node(ty, Kind::Arg { index }, [VOID]);
self.ci.nodes.lock(value);
let sym = parser::find_symbol(&ast.symbols, arg.id);
@ -1800,8 +1832,8 @@ impl Codegen {
self.ci.emit(instrs::jala(reg::ZERO, reg::RET_ADDR, 0));
}
self.tys.ins.funcs[id as usize].code.append(&mut self.ci.code);
self.tys.ins.funcs[id as usize].relocs.append(&mut self.ci.relocs);
self.tys.funcs[id as usize].code.append(&mut self.ci.code);
self.tys.funcs[id as usize].relocs.append(&mut self.ci.relocs);
self.ci.nodes.clear();
self.ci.filled.clear();
self.pool.cis.push(core::mem::replace(&mut self.ci, prev_ci));
@ -2010,9 +2042,9 @@ impl Codegen {
};
let key = SymKey::Decl(file, ident);
if let Some(existing) = self.tys.syms.get(key, &self.tys.ins) {
if let Some(existing) = self.tys.syms.get(&key) {
if let ty::Kind::Func(id) = existing.expand()
&& let func = &mut self.tys.ins.funcs[id as usize]
&& let func = &mut self.tys.funcs[id as usize]
&& let Err(idx) = task::unpack(func.offset)
&& idx < self.tasks.len()
{
@ -2026,20 +2058,19 @@ impl Codegen {
let prev_file = core::mem::replace(&mut self.ci.file, file);
let sym = match expr {
Expr::BinOp {
left: &Expr::Ident { id, .. },
left: Expr::Ident { .. },
op: TokenKind::Decl,
right: &Expr::Closure { pos, args, ret, .. },
} => {
let func = Func {
file,
name: id,
sig: '_b: {
let arg_base = self.tys.ins.args.len();
let arg_base = self.tys.args.len();
for arg in args {
let sym = parser::find_symbol(&f.symbols, arg.id);
assert!(sym.flags & idfl::COMPTIME == 0, "TODO");
let ty = self.ty(&arg.ty);
self.tys.ins.args.push(ty);
self.tys.args.push(ty);
}
let Some(args) = self.pack_args(arg_base) else {
@ -2060,8 +2091,8 @@ impl Codegen {
..Default::default()
};
let id = self.tys.ins.funcs.len() as _;
self.tys.ins.funcs.push(func);
let id = self.tys.funcs.len() as _;
self.tys.funcs.push(func);
ty::Kind::Func(id)
}
@ -2073,7 +2104,7 @@ impl Codegen {
e => unimplemented!("{e:#?}"),
};
self.ci.file = prev_file;
self.tys.syms.insert(key, sym.compress(), &self.tys.ins);
self.tys.syms.insert(key, sym.compress());
sym
}
@ -2152,15 +2183,15 @@ impl Codegen {
}
fn pack_args(&mut self, arg_base: usize) -> Option<ty::Tuple> {
let needle = &self.tys.ins.args[arg_base..];
let needle = &self.tys.args[arg_base..];
if needle.is_empty() {
return Some(ty::Tuple::empty());
}
let len = needle.len();
// FIXME: maybe later when this becomes a bottleneck we use more
// efficient search (SIMD?, indexing?)
let sp = self.tys.ins.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.tys.ins.args.truncate((sp + needle.len()).max(arg_base));
let sp = self.tys.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.tys.args.truncate((sp + needle.len()).max(arg_base));
ty::Tuple::new(sp, len)
}
@ -2396,7 +2427,7 @@ impl<'a> Function<'a> {
for (arg, ti) in
self.nodes[VOID].clone().outputs.into_iter().skip(2).zip(self.sig.args.range())
{
let ty = self.tys.ins.args[ti];
let ty = self.tys.args[ti];
match self.tys.size_of(ty) {
0 => continue,
1..=8 => {
@ -2445,7 +2476,7 @@ impl<'a> Function<'a> {
self.nodes[nid].ralloc_backref = self.nodes[prev].ralloc_backref;
let mut ops = vec![];
let fuc = self.tys.ins.funcs[func as usize].sig.unwrap();
let fuc = self.tys.funcs[func as usize].sig.unwrap();
if self.tys.size_of(fuc.ret) != 0 {
self.def_nid(nid);
ops.push(regalloc2::Operand::reg_fixed_def(
@ -2456,7 +2487,7 @@ impl<'a> Function<'a> {
let mut parama = self.tys.parama(fuc.ret);
for (&(mut i), ti) in node.inputs[1..].iter().zip(fuc.args.range()) {
let ty = self.tys.ins.args[ti];
let ty = self.tys.args[ti];
loop {
match self.nodes[i].kind {
Kind::Stre { .. } => i = self.nodes[i].inputs[2],