This commit is contained in:
Jakub Doka 2024-10-01 21:33:30 +02:00
parent b2254e9820
commit bdc2c43773
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
5 changed files with 180 additions and 151 deletions

10
Cargo.lock generated
View file

@ -110,6 +110,12 @@ version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
[[package]]
name = "hbbytecode"
version = "0.1.0"
@ -123,7 +129,7 @@ name = "hblang"
version = "0.1.0"
dependencies = [
"env_logger",
"hashbrown",
"hashbrown 0.15.0",
"hbbytecode",
"hbvm",
"log",
@ -191,7 +197,7 @@ source = "git+https://github.com/jakubDoka/regalloc2?branch=reuse-allocations#41
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown",
"hashbrown 0.14.5",
"log",
"rustc-hash",
"smallvec",

View file

@ -8,7 +8,7 @@ name = "hbc"
path = "src/main.rs"
[dependencies]
hashbrown = { version = "0.14.5", default-features = false }
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
hbbytecode = { version = "0.1.0", path = "../hbbytecode" }
hbvm = { path = "../hbvm", features = ["nightly"] }
log = { version = "0.4.22", features = ["release_max_level_error"] }

View file

@ -747,10 +747,10 @@ impl Codegen {
return ty.expand().inner();
}
let prev_tmp = self.tys.fields_tmp.len();
let prev_tmp = self.tys.tmp.fields.len();
for sf in fields.iter().filter_map(CommentOr::or) {
let f = Field { name: self.tys.field_names.intern(sf.name), ty: self.ty(&sf.ty) };
self.tys.fields_tmp.push(f);
let f = Field { name: self.tys.names.intern(sf.name), ty: self.ty(&sf.ty) };
self.tys.tmp.fields.push(f);
}
self.tys.structs.push(Struct {
field_start: self.tys.fields.len() as _,
@ -758,7 +758,7 @@ impl Codegen {
file,
..Default::default()
});
self.tys.fields.extend(self.tys.fields_tmp.drain(prev_tmp..));
self.tys.fields.extend(self.tys.tmp.fields.drain(prev_tmp..));
if let Some(sym) = sym {
self.tys
@ -2580,7 +2580,7 @@ impl Codegen {
self.ci.free_loc(ret.loc);
Global { ty: ret.ty, file, name, data, ast: ExprRef::new(expr), ..Default::default() }
Global { ty: ret.ty, file, name, data, ..Default::default() }
}
fn ct_eval<T, E>(

View file

@ -36,7 +36,7 @@ use {
ty::ArrayLen,
},
alloc::{collections::BTreeMap, string::String, vec::Vec},
core::{cell::Cell, fmt::Display, hash::BuildHasher, ops::Range, usize},
core::{cell::Cell, fmt::Display, ops::Range},
hashbrown::hash_map,
hbbytecode as instrs,
};
@ -69,6 +69,89 @@ pub mod son;
mod lexer;
mod vc;
mod ctx_map {
    use core::hash::BuildHasher;

    pub type Hash = u64;

    /// Build-hasher for the inner map; see [`IdentityHasher`].
    pub type HashBuilder = core::hash::BuildHasherDefault<IdentityHasher>;

    /// Pass-through hasher: `write_u64` stores the value and `finish`
    /// returns it unchanged. Entries already carry a precomputed hash
    /// (`Key::hash`), so rehashing inside the map would be wasted work.
    #[derive(Default)]
    pub struct IdentityHasher(u64);

    impl core::hash::Hasher for IdentityHasher {
        fn finish(&self) -> u64 {
            self.0
        }

        fn write(&mut self, _: &[u8]) {
            // Only u64 hashes are ever fed in (via `Key`'s Hash impl);
            // a byte-wise write would indicate misuse of this hasher.
            unimplemented!()
        }

        fn write_u64(&mut self, i: u64) {
            self.0 = i;
        }
    }

    /// Map entry: the stored value plus its precomputed hash.
    pub struct Key<T> {
        pub value: T,
        pub hash: Hash,
    }

    impl<T> core::hash::Hash for Key<T> {
        fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
            // Feed the cached hash straight through to `IdentityHasher`.
            state.write_u64(self.hash);
        }
    }

    /// Implemented by values whose lookup key must be derived from an
    /// external context (e.g. an index whose keyed data lives in a
    /// side table passed as `Ctx`).
    pub trait CtxEntry {
        type Ctx: ?Sized;
        type Key<'a>: Eq + core::hash::Hash;

        fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a>;
    }

    /// Hash map whose entries are compared through a caller-supplied
    /// context rather than by the stored value itself. Built on
    /// hashbrown's raw-entry API so both probing and equality go
    /// through `CtxEntry::key`.
    pub struct CtxMap<T> {
        inner: hashbrown::HashMap<Key<T>, (), HashBuilder>,
    }

    impl<T> Default for CtxMap<T> {
        fn default() -> Self {
            Self { inner: Default::default() }
        }
    }

    impl<T: CtxEntry> CtxMap<T> {
        /// Raw entry lookup for `value`; returns the entry (occupied or
        /// vacant) together with the hash the caller must store in
        /// `Key::hash` when inserting into a vacant slot.
        pub fn entry<'a, 'b>(
            &'a mut self,
            value: T::Key<'b>,
            ctx: &'b T::Ctx,
        ) -> (hashbrown::hash_map::RawEntryMut<'a, Key<T>, (), HashBuilder>, Hash) {
            // Hash is computed once with Fnv here; the inner map never
            // rehashes because `IdentityHasher` echoes `Key::hash` back.
            let hash = crate::FnvBuildHasher::default().hash_one(&value);
            (self.inner.raw_entry_mut().from_hash(hash, |k| k.value.key(ctx) == value), hash)
        }

        /// Immutable lookup by a context-derived key.
        pub fn get<'a>(&self, value: T::Key<'a>, ctx: &'a T::Ctx) -> Option<&T> {
            let hash = crate::FnvBuildHasher::default().hash_one(&value);
            self.inner
                .raw_entry()
                .from_hash(hash, |k| k.value.key(ctx) == value)
                .map(|(k, _)| &k.value)
        }

        pub fn clear(&mut self) {
            self.inner.clear();
        }

        /// Removes the entry matching `value`'s context-derived key,
        /// returning the stored value, or `None` if absent.
        pub fn remove<'a>(&mut self, value: &T, ctx: &'a T::Ctx) -> Option<T> {
            let (entry, _) = self.entry(value.key(ctx), ctx);
            match entry {
                hashbrown::hash_map::RawEntryMut::Occupied(o) => Some(o.remove_entry().0.value),
                hashbrown::hash_map::RawEntryMut::Vacant(_) => None,
            }
        }
    }
}
mod task {
use super::Offset;
@ -124,7 +207,7 @@ mod ty {
lexer::TokenKind,
parser::{self},
},
core::{num::NonZeroU32, ops::Range, usize},
core::{num::NonZeroU32, ops::Range},
};
pub type ArrayLen = u32;
@ -153,10 +236,6 @@ mod ty {
Some(Self((pos << Self::LEN_BITS | len) as u32))
}
//pub fn view(self, slice: &[Id]) -> &[Id] {
// &slice[self.0 as usize >> Self::LEN_BITS..][..self.len()]
//}
pub fn range(self) -> Range<usize> {
let start = self.0 as usize >> Self::LEN_BITS;
start..start + self.len()
@ -169,15 +248,20 @@ mod ty {
pub fn empty() -> Self {
Self(0)
}
pub fn repr(&self) -> u32 {
self.0
}
}
// Compressed type id (non-zero so `Option<Id>` stays 4 bytes).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Id(NonZeroU32);

impl crate::ctx_map::CtxEntry for Id {
    // NOTE(review): `TypeInsts` does not match the `TypeIns` struct name
    // used elsewhere in this commit — confirm which spelling is intended.
    type Ctx = TypeInsts;
    type Key<'a> = crate::SymKey<'a>;

    fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
        // Not implemented yet in this commit (work-in-progress migration
        // of the symbol table onto `ctx_map::CtxMap`).
        todo!()
    }
}
impl Default for Id {
fn default() -> Self {
Self(unsafe { NonZeroU32::new_unchecked(UNDECLARED) })
@ -399,12 +483,7 @@ mod ty {
if i != 0 {
write!(f, ", ")?;
}
write!(
f,
"{}: {}",
self.tys.field_names.ident_str(name),
self.rety(ty)
)?;
write!(f, "{}: {}", self.tys.names.ident_str(name), self.rety(ty))?;
}
write!(f, "}}")
} else {
@ -442,15 +521,14 @@ fn emit(out: &mut Vec<u8>, (len, instr): EncodedInstr) {
out.extend_from_slice(&instr[..len]);
}
#[derive(PartialEq, Eq, Hash, Debug)]
enum SymKey {
Pointer(ty::Id),
#[derive(PartialEq, Eq, Hash)]
enum SymKey<'a> {
Pointer(&'a Ptr),
Struct(FileId, Pos),
FuncInst(ty::Func, ty::Tuple),
MomizedCall(ty::Func),
Decl(FileId, Ident),
Slice(ty::Id),
Array(ty::Id, ArrayLen),
Array(&'a Array),
}
#[derive(Clone, Copy)]
@ -491,7 +569,6 @@ struct Global {
file: FileId,
name: Ident,
ty: ty::Id,
ast: ExprRef,
offset: Offset,
data: Vec<u8>,
}
@ -502,7 +579,6 @@ impl Default for Global {
ty: Default::default(),
offset: u32::MAX,
data: Default::default(),
ast: ExprRef::default(),
file: u32::MAX,
name: u32::MAX,
}
@ -551,11 +627,12 @@ struct Struct {
field_start: u32,
}
// Pointer type descriptor; Eq/Hash are required so it can be interned
// by reference via `SymKey::Pointer(&Ptr)`.
#[derive(PartialEq, Eq, Hash)]
struct Ptr {
    base: ty::Id,
}
#[derive(Clone, Copy)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Array {
ty: ty::Id,
len: ArrayLen,
@ -586,56 +663,30 @@ struct AbleOsExecutableHeader {
metadata_length: u64,
}
struct IdentEntry {
hash: u32,
ident: Ident,
}
impl ctx_map::CtxEntry for Ident {
type Ctx = str;
type Key<'a> = &'a str;
impl core::hash::Hash for IdentEntry {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
state.write_u64((self.hash as u64) << 32);
}
}
#[derive(Default)]
struct IdentityHasher(u64);
impl core::hash::Hasher for IdentityHasher {
fn finish(&self) -> u64 {
self.0
}
fn write(&mut self, _: &[u8]) {
unimplemented!()
}
fn write_u64(&mut self, i: u64) {
self.0 = i;
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
unsafe { ctx.get_unchecked(ident::range(*self)) }
}
}
#[derive(Default)]
struct IdentInterner {
lookup: hashbrown::HashMap<IdentEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>,
lookup: ctx_map::CtxMap<Ident>,
strings: String,
}
impl IdentInterner {
fn intern(&mut self, ident: &str) -> Ident {
let hash = FnvBuildHasher::default().hash_one(ident) & 0xFFFFFFFF00000000;
match self.lookup.raw_entry_mut().from_hash(
hash,
|k| unsafe { self.strings.get_unchecked(ident::range(k.ident)) } == ident,
) {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.ident,
let (entry, hash) = self.lookup.entry(ident, &self.strings);
match entry {
hash_map::RawEntryMut::Occupied(o) => o.get_key_value().0.value,
hash_map::RawEntryMut::Vacant(v) => {
let id = ident::new(self.strings.len() as _, ident.len() as _);
self.strings.push_str(ident);
v.insert_hashed_nocheck(
hash,
IdentEntry { hash: (hash >> 32) as _, ident: id },
(),
);
v.insert(ctx_map::Key { hash, value: id }, ());
id
}
}
@ -646,33 +697,35 @@ impl IdentInterner {
}
fn project(&self, ident: &str) -> Option<Ident> {
let hash = FnvBuildHasher::default().hash_one(ident) & 0xFFFFFFFF00000000;
self.lookup
.raw_entry()
.from_hash(
hash,
|k| unsafe { self.strings.get_unchecked(ident::range(k.ident)) } == ident,
)
.map(|(k, _)| k.ident)
self.lookup.get(ident, &self.strings).copied()
}
}
#[derive(Default)]
struct Types {
syms: HashMap<SymKey, ty::Id>,
// Scratch buffers reused across passes; each user drains its buffer
// back to empty when done (callers debug_assert emptiness on reuse,
// e.g. in `dump_reachable`).
struct TypesTmp {
    // Staging area for struct fields while their types are resolved.
    fields: Vec<Field>,
    // Worklist for the reachability walk over funcs/globals.
    frontier: Vec<ty::Id>,
    // Globals discovered reachable during the walk.
    globals: Vec<ty::Global>,
    // Functions discovered reachable during the walk.
    funcs: Vec<ty::Func>,
}
#[derive(Default)]
struct TypeIns {
funcs: Vec<Func>,
args: Vec<ty::Id>,
globals: Vec<Global>,
structs: Vec<Struct>,
fields: Vec<Field>,
field_names: IdentInterner,
ptrs: Vec<Ptr>,
arrays: Vec<Array>,
}
fields_tmp: Vec<Field>,
frontier_tmp: Vec<ty::Id>,
reachable_globals: Vec<ty::Global>,
reachable_funcs: Vec<ty::Func>,
#[derive(Default)]
struct Types {
    // Interned type symbols; lookups are keyed through `SymKey` via the
    // context-aware map in `ctx_map`.
    syms: ctx_map::CtxMap<ty::Id>,
    // Interner for identifier strings (field names, etc.).
    names: IdentInterner,
    // The actual type instance storage.
    ins: TypeIns,
    // Reusable scratch space; kept empty between uses.
    tmp: TypesTmp,
}
const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
@ -757,13 +810,13 @@ impl Types {
return Some(ty);
}
let prev_tmp = self.fields_tmp.len();
let prev_tmp = self.tmp.fields.len();
for field in fields.iter().filter_map(CommentOr::or) {
let Some(ty) = self.ty(file, &field.ty, files) else {
self.fields_tmp.truncate(prev_tmp);
self.tmp.fields.truncate(prev_tmp);
return None;
};
self.fields_tmp.push(Field { name: self.field_names.intern(field.name), ty });
self.tmp.fields.push(Field { name: self.names.intern(field.name), ty });
}
self.structs.push(Struct {
@ -772,7 +825,7 @@ impl Types {
explicit_alignment: packed.then_some(1),
..Default::default()
});
self.fields.extend(self.fields_tmp.drain(prev_tmp..));
self.fields.extend(self.tmp.fields.drain(prev_tmp..));
let ty = ty::Kind::Struct(self.structs.len() as u32 - 1).compress();
self.syms.insert(sym, ty);
@ -794,12 +847,12 @@ impl Types {
}
fn dump_reachable(&mut self, from: ty::Func, to: &mut Vec<u8>) -> AbleOsExecutableHeader {
debug_assert!(self.frontier_tmp.is_empty());
debug_assert!(self.reachable_funcs.is_empty());
debug_assert!(self.reachable_globals.is_empty());
debug_assert!(self.tmp.frontier.is_empty());
debug_assert!(self.tmp.funcs.is_empty());
debug_assert!(self.tmp.globals.is_empty());
self.frontier_tmp.push(ty::Kind::Func(from).compress());
while let Some(itm) = self.frontier_tmp.pop() {
self.tmp.frontier.push(ty::Kind::Func(from).compress());
while let Some(itm) = self.tmp.frontier.pop() {
match itm.expand() {
ty::Kind::Func(func) => {
let fuc = &mut self.funcs[func as usize];
@ -807,8 +860,8 @@ impl Types {
continue;
}
fuc.offset = 0;
self.reachable_funcs.push(func);
self.frontier_tmp.extend(fuc.relocs.iter().map(|r| r.target));
self.tmp.funcs.push(func);
self.tmp.frontier.extend(fuc.relocs.iter().map(|r| r.target));
}
ty::Kind::Global(glob) => {
let glb = &mut self.globals[glob as usize];
@ -816,13 +869,13 @@ impl Types {
continue;
}
glb.offset = 0;
self.reachable_globals.push(glob);
self.tmp.globals.push(glob);
}
_ => unreachable!(),
}
}
for &func in &self.reachable_funcs {
for &func in &self.tmp.funcs {
let fuc = &mut self.funcs[func as usize];
fuc.offset = to.len() as _;
to.extend(&fuc.code);
@ -830,7 +883,7 @@ impl Types {
let code_length = to.len();
for global in self.reachable_globals.drain(..) {
for global in self.tmp.globals.drain(..) {
let global = &mut self.globals[global as usize];
global.offset = to.len() as _;
to.extend(&global.data);
@ -838,7 +891,7 @@ impl Types {
let data_length = to.len() - code_length;
for func in self.reachable_funcs.drain(..) {
for func in self.tmp.funcs.drain(..) {
let fuc = &self.funcs[func as usize];
for rel in &fuc.relocs {
let offset = match rel.target.expand() {
@ -1003,7 +1056,7 @@ impl Types {
}
fn find_struct_field(&self, s: ty::Struct, name: &str) -> Option<usize> {
let name = self.field_names.project(name)?;
let name = self.names.project(name)?;
self.struct_fields(s).iter().position(|f| f.name == name)
}
}
@ -1020,7 +1073,7 @@ impl OffsetIter {
}
fn offset_of(tys: &Types, idx: ty::Struct, field: &str) -> Option<(Offset, ty::Id)> {
let field_id = tys.field_names.project(field)?;
let field_id = tys.names.project(field)?;
OffsetIter::new(idx, tys)
.into_iter(tys)
.find(|(f, _)| f.name == field_id)

View file

@ -1,5 +1,6 @@
use {
crate::{
ctx_map::CtxEntry,
ident::Ident,
instrs,
lexer::{self, TokenKind},
@ -11,7 +12,7 @@ use {
task,
ty::{self},
vc::{BitSet, Vc},
Func, HashMap, IdentityHasher, Offset, OffsetIter, Reloc, Sig, SymKey, TypedReloc, Types,
Func, HashMap, Offset, OffsetIter, Reloc, Sig, SymKey, TypedReloc, Types,
},
alloc::{borrow::ToOwned, string::String, vec::Vec},
core::{
@ -20,12 +21,11 @@ use {
convert::identity,
fmt::{self, Debug, Display, Write},
format_args as fa,
hash::{BuildHasher, Hasher},
mem, ops,
hash::BuildHasher,
mem, ops, usize,
},
hashbrown::hash_map,
regalloc2::VReg,
std::process::id,
};
const VOID: Nid = 0;
@ -44,19 +44,17 @@ pub mod reg {
pub type Reg = u8;
}
struct LookupEntry {
nid: Nid,
hash: u64,
}
type Lookup = crate::ctx_map::CtxMap<Nid>;
impl core::hash::Hash for LookupEntry {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u64(self.hash);
impl crate::ctx_map::CtxEntry for Nid {
    // Context is the node arena (`Nodes::values`): `Ok` holds a live
    // node, `Err` links the free list.
    type Ctx = [Result<Node, Nid>];
    // Dedup key: a node's kind, inputs, and type.
    type Key<'a> = (Kind, &'a [Nid], ty::Id);

    fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
        // Panics if this nid points at a freed (Err) slot — lookup keys
        // must only be derived from live nodes.
        ctx[*self as usize].as_ref().unwrap().key()
    }
}
type Lookup = hashbrown::HashMap<LookupEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>;
struct Nodes {
values: Vec<Result<Node, Nid>>,
visited: BitSet,
@ -106,10 +104,10 @@ impl Nodes {
let mut lookup_meta = None;
if !node.is_lazy_phi() {
let (raw_entry, hash) = Self::find_node(&mut self.lookup, &self.values, &node);
let (raw_entry, hash) = self.lookup.entry(node.key(), &self.values);
let entry = match raw_entry {
hash_map::RawEntryMut::Occupied(o) => return o.get_key_value().0.nid,
hash_map::RawEntryMut::Occupied(o) => return o.get_key_value().0.value,
hash_map::RawEntryMut::Vacant(v) => v,
};
@ -129,38 +127,14 @@ impl Nodes {
self.free = mem::replace(&mut self.values[free as usize], Ok(node)).unwrap_err();
if let Some((entry, hash)) = lookup_meta {
entry.insert(LookupEntry { nid: free, hash }, ());
entry.insert(crate::ctx_map::Key { value: free, hash }, ());
}
free
}
fn find_node<'a>(
lookup: &'a mut Lookup,
values: &[Result<Node, Nid>],
node: &Node,
) -> (
hash_map::RawEntryMut<'a, LookupEntry, (), core::hash::BuildHasherDefault<IdentityHasher>>,
u64,
) {
let hash = crate::FnvBuildHasher::default().hash_one(node.key());
let entry = lookup
.raw_entry_mut()
.from_hash(hash, |n| values[n.nid as usize].as_ref().unwrap().key() == node.key());
(entry, hash)
}
fn remove_node_lookup(&mut self, target: Nid) {
if !self[target].is_lazy_phi() {
match Self::find_node(
&mut self.lookup,
&self.values,
self.values[target as usize].as_ref().unwrap(),
)
.0
{
hash_map::RawEntryMut::Occupied(o) => o.remove(),
hash_map::RawEntryMut::Vacant(_) => unreachable!(),
};
self.lookup.remove(&target, &self.values).unwrap();
}
}
@ -379,20 +353,16 @@ impl Nodes {
let prev = self[target].inputs[inp_index];
self[target].inputs[inp_index] = with;
let (entry, hash) = Self::find_node(
&mut self.lookup,
&self.values,
self.values[target as usize].as_ref().unwrap(),
);
let (entry, hash) = self.lookup.entry(target.key(&self.values), &self.values);
match entry {
hash_map::RawEntryMut::Occupied(other) => {
let rpl = other.get_key_value().0.nid;
let rpl = other.get_key_value().0.value;
self[target].inputs[inp_index] = prev;
self.replace(target, rpl);
rpl
}
hash_map::RawEntryMut::Vacant(slot) => {
slot.insert(LookupEntry { nid: target, hash }, ());
slot.insert(crate::ctx_map::Key { value: target, hash }, ());
let index = self[prev].outputs.iter().position(|&o| o == target).unwrap();
self[prev].outputs.swap_remove(index);
self[with].outputs.push(target);
@ -845,7 +815,7 @@ impl fmt::Display for Kind {
#[derive(Debug, Default, Clone)]
//#[repr(align(64))]
struct Node {
pub struct Node {
kind: Kind,
inputs: Vc,
outputs: Vc,
@ -1144,7 +1114,7 @@ impl Codegen {
.tys
.struct_fields(s)
.iter()
.map(|f| self.tys.field_names.ident_str(f.name))
.map(|f| self.tys.names.ident_str(f.name))
.intersperse("', '")
.collect::<String>();
self.report(
@ -1374,7 +1344,7 @@ impl Codegen {
.iter()
.zip(offs)
.filter(|&(_, (ty, _))| ty != ty::Id::UNDECLARED)
.map(|(f, _)| self.tys.field_names.ident_str(f.name))
.map(|(f, _)| self.tys.names.ident_str(f.name))
.intersperse(", ")
.collect::<String>();