forked from AbleOS/holey-bytes

commit 3b4b30b2bd (parent cf672beb79)

    Restructuring the compiler

    Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
@@ -6,7 +6,8 @@ use {
     alloc::{string::String, vec::Vec},
     core::ffi::CStr,
     hblang::{
-        son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
+        backend::hbvm::HbvmBackend,
+        son::{Codegen, CodegenCtx},
         ty::Module,
         Ent,
     },
@@ -244,7 +244,13 @@ main := fn(): uint {

#### enums
```hb
-Enum := enum {A, B, C}
+Enum := enum {
+    A,
+    B,
+    C,
+
+    $default := Self.A
+}

 some_enum := fn(): Enum return .A

@@ -252,7 +258,7 @@ main := fn(): uint {
 e := some_enum()

 match e {
-    .A => return 0,
+    Enum.default => return 0,
     _ => return 100,
 }
 }
@@ -1,15 +1,14 @@
 use {
-    super::{AssemblySpec, Backend, Nid, Node, Nodes, VOID},
+    super::{AssemblySpec, Backend},
     crate::{
         lexer::TokenKind,
         parser,
-        son::{debug_assert_matches, Kind, MEM},
+        son::{Kind, Nid, Node, Nodes, MEM, VOID},
-        ty::{self, Arg, Loc, Module},
+        ty::{self, Arg, Loc, Module, Offset, Sig, Size, Types},
         utils::{Ent, EntVec},
-        Offset, Sig, Size, Types,
     },
     alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
-    core::{mem, ops::Range},
+    core::{assert_matches::debug_assert_matches, mem, ops::Range},
     hbbytecode::{self as instrs, *},
     reg::Reg,
 };
@@ -254,30 +253,24 @@ impl Backend for HbvmBackend {
         hbbytecode::disasm(&mut sluce, &functions, output, eca_handler)
     }

-    fn emit_ct_body(
-        &mut self,
-        id: ty::Func,
-        nodes: &mut Nodes,
-        tys: &Types,
-        files: &[parser::Ast],
-    ) {
+    fn emit_ct_body(&mut self, id: ty::Func, nodes: &Nodes, tys: &Types, files: &[parser::Ast]) {
         self.emit_body(id, nodes, tys, files);
         let fd = &mut self.funcs[id];
         fd.code.truncate(fd.code.len() - instrs::jala(0, 0, 0).0);
         emit(&mut fd.code, instrs::tx());
     }

-    fn emit_body(&mut self, id: ty::Func, nodes: &mut Nodes, tys: &Types, files: &[parser::Ast]) {
+    fn emit_body(&mut self, id: ty::Func, nodes: &Nodes, tys: &Types, files: &[parser::Ast]) {
         let sig = tys.ins.funcs[id].sig.unwrap();

         debug_assert!(self.code.is_empty());

         self.offsets.clear();
-        self.offsets.resize(nodes.values.len(), Offset::MAX);
+        self.offsets.resize(nodes.len(), Offset::MAX);

         let mut stack_size = 0;
         '_compute_stack: {
-            let mems = mem::take(&mut nodes[MEM].outputs);
+            let mems = &nodes[MEM].outputs;
             for &stck in mems.iter() {
                 if !matches!(nodes[stck].kind, Kind::Stck | Kind::Arg) {
                     debug_assert_matches!(
@@ -300,7 +293,6 @@ impl Backend for HbvmBackend {
                 }
                 self.offsets[stck as usize] = stack_size - self.offsets[stck as usize];
             }
-            nodes[MEM].outputs = mems;
         }

         let (saved, tail) = self.emit_body_code(nodes, sig, tys, files);
@@ -1,20 +1,16 @@
 use {
     crate::{
-        parser, quad_sort,
-        son::{
-            debug_assert_matches,
-            hbvm::{
+        backend::hbvm::{
             reg::{self, Reg},
             HbvmBackend, Nid, Nodes, PLoc,
         },
-            Kind, ARG_START, MEM, VOID,
-        },
-        ty::{self, Arg, Loc},
+        parser, quad_sort,
+        son::{Kind, ARG_START, MEM, VOID},
+        ty::{self, Arg, Loc, Sig, Types},
         utils::BitSet,
-        Sig, Types,
     },
     alloc::{borrow::ToOwned, vec::Vec},
-    core::{mem, ops::Range, u8, usize},
+    core::{assert_matches::debug_assert_matches, mem, ops::Range},
     hbbytecode::{self as instrs},
 };

@@ -27,7 +23,6 @@ impl HbvmBackend {
         files: &[parser::Ast],
     ) -> (usize, bool) {
         let tail = Function::build(nodes, tys, &mut self.ralloc, sig);
-        nodes.basic_blocks();

         let strip_load = |value| match nodes[value].kind {
             Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
@@ -69,9 +64,8 @@ impl HbvmBackend {

         let atr = |allc: Nid| {
             let allc = strip_load(allc);
-            debug_assert_eq!(
-                nodes[allc].lock_rc.get(),
-                0,
+            debug_assert!(
+                nodes.is_unlocked(allc),
                 "{:?} {}",
                 nodes[allc],
                 ty::Display::new(tys, files, nodes[allc].ty)
@@ -114,9 +108,8 @@ impl HbvmBackend {

         let atr = |allc: Nid| {
             let allc = strip_load(allc);
-            debug_assert_eq!(
-                nodes[allc].lock_rc.get(),
-                0,
+            debug_assert!(
+                nodes.is_unlocked(allc),
                 "{:?} {}",
                 nodes[allc],
                 ty::Display::new(tys, files, nodes[allc].ty)
@@ -164,12 +157,23 @@ impl HbvmBackend {
             }
         }

-        debug_assert_eq!(moves.len(), {
-            moves.sort_unstable();
-            moves.dedup();
-            moves.len()
-        });
+        // code makes sure all moves are ordered so that register is only moved
+        // into after all its uses
+        //
+        // in case of cycles, swaps are used instead in which case the conflicting
+        // move is removed and remining moves are replaced with swaps

+        const CYCLE_SENTINEL: u8 = u8::MAX;
+
+        debug_assert_eq!(
+            {
+                let mut dests = moves.iter().map(|&[d, ..]| d).collect::<Vec<_>>();
+                dests.sort_unstable();
+                dests.dedup();
+                dests.len()
+            },
+            moves.len()
+        );
         let mut graph = [u8::MAX; 256];
         for &[d, s, _] in moves.iter() {
             graph[d as usize] = s;
@@ -193,18 +197,20 @@ impl HbvmBackend {
                 // cut the cycle
                 graph[c as usize] = u8::MAX;
                 // mark cycyle
-                *depth = u8::MAX;
+                *depth = CYCLE_SENTINEL;
             }

         quad_sort(&mut moves, |a, b| a[2].cmp(&b[2]).reverse());

         for [mut d, mut s, depth] in moves {
-            if depth == u8::MAX {
+            if depth == CYCLE_SENTINEL {
                 while graph[s as usize] != u8::MAX {
                     self.emit(instrs::swa(d, s));
                     d = s;
                     mem::swap(&mut graph[s as usize], &mut s);
                 }
+                // trivial cycle denotes this move was already generated in a
+                // cycyle
                 graph[s as usize] = s;
             } else if graph[s as usize] != s {
                 self.emit(instrs::cp(d, s));
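The comment introduced in the hunk above describes the intent of this move scheduler: ordinary copies are ordered so a register is only overwritten once nothing still needs to read it, and any cyclic group of moves is rotated into place with swaps instead. Below is a small self-contained sketch of that general idea, not the patch's own code: the `resolve_parallel_moves` name, the `[dst, src]` encoding and the fixed 256-entry table are illustrative assumptions, while the real implementation above also tracks a depth per move and reuses its `graph` table.

```rust
// Sketch: sequentialize parallel register moves `[dst, src]` so no source is
// clobbered before it is read. Destinations are assumed unique; u8::MAX is
// reserved as the "no pending move" marker.
fn resolve_parallel_moves(moves: &[[u8; 2]], mut emit: impl FnMut(&str, u8, u8)) {
    const NONE: u8 = u8::MAX;
    let mut src_of = [NONE; 256]; // src_of[d] = source still owed to destination d
    let mut readers = [0u32; 256]; // pending moves that still read this register
    for &[d, s] in moves {
        src_of[d as usize] = s;
        readers[s as usize] += 1;
    }

    // Chains: a destination nobody reads anymore can be written immediately;
    // doing so may unblock the move that feeds it.
    let mut ready: Vec<u8> =
        moves.iter().map(|&[d, _]| d).filter(|&d| readers[d as usize] == 0).collect();
    while let Some(d) = ready.pop() {
        let s = src_of[d as usize];
        emit("cp", d, s);
        src_of[d as usize] = NONE;
        readers[s as usize] -= 1;
        if readers[s as usize] == 0 && src_of[s as usize] != NONE {
            ready.push(s);
        }
    }

    // Whatever is left forms disjoint cycles; a cycle of length n is resolved
    // with n - 1 swaps along the cycle.
    for &[start, _] in moves {
        let mut cur = start;
        while src_of[cur as usize] != NONE {
            let s = src_of[cur as usize];
            src_of[cur as usize] = NONE;
            if src_of[s as usize] == NONE {
                break; // rest of the cycle was already rotated into place
            }
            emit("swa", cur, s);
            cur = s;
        }
    }
}

fn main() {
    // r0 <- r1 and r1 <- r0 form a cycle; r3 <- r0 is an ordinary chain move.
    let moves = [[0u8, 1], [1, 0], [3, 0]];
    resolve_parallel_moves(&moves, |op, d, s| println!("{op} r{d}, r{s}"));
    // prints: cp r3, r0 / swa r0, r1
}
```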
@@ -383,8 +389,8 @@ impl<'a> Function<'a> {
     fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
         func.blocks.clear();
         func.instrs.clear();
-        func.backrefs.resize(nodes.values.len(), u16::MAX);
-        func.visited.clear(nodes.values.len());
+        func.backrefs.resize(nodes.len(), u16::MAX);
+        func.visited.clear(nodes.len());
         let mut s = Self { tail: true, nodes, tys, sig, func };
         s.emit_node(VOID);
         debug_assert!(s.func.blocks.array_chunks().all(|[a, b]| a.end == b.start));
@@ -528,7 +534,7 @@ impl<'a> Function<'a> {

 impl Nodes {
     fn vreg_count(&self) -> usize {
-        self.values.len()
+        self.len()
     }

     fn use_block_of(&self, inst: Nid, uinst: Nid) -> Nid {
@@ -576,7 +582,7 @@ impl Nodes {
                 .flat_map(|(p, ls)| ls.iter().map(move |l| (p, l)))
                 .filter(|&(o, &n)| self.is_data_dep(o, n))
                 .map(|(p, &n)| (self.use_block_of(p, n), n))
-                .inspect(|&(_, n)| debug_assert_eq!(self[n].lock_rc.get(), 0)),
+                .inspect(|&(_, n)| debug_assert!(self.is_unlocked(n))),
         )
         .into_iter()
         .flatten()
@@ -616,7 +622,7 @@ impl<'a> Regalloc<'a> {
         debug_assert!(self.res.dfs_buf.is_empty());

         let mut bundle = Bundle::new(self.res.instrs.len());
-        self.res.visited.clear(self.nodes.values.len());
+        self.res.visited.clear(self.nodes.len());

         for i in (0..self.res.blocks.len()).rev() {
             for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
@@ -650,7 +656,7 @@ impl<'a> Regalloc<'a> {

     fn collect_bundle(&mut self, inst: Nid, into: &mut Bundle) {
         let dom = self.nodes.idom_of(inst);
-        self.res.dfs_seem.clear(self.nodes.values.len());
+        self.res.dfs_seem.clear(self.nodes.len());
         for (cursor, uinst) in self.nodes.uses_of(inst) {
             if !self.res.dfs_seem.set(uinst) {
                 continue;
@@ -1,7 +1,9 @@
 use {
     crate::{
         lexer::{self, Lexer, TokenKind},
-        parser::{self, CommentOr, CtorField, EnumField, Expr, Poser, Radix, StructField},
+        parser::{
+            self, CommentOr, CtorField, EnumField, Expr, FieldList, Poser, Radix, StructField,
+        },
     },
     core::{
         fmt::{self},
@@ -260,6 +262,32 @@ impl<'a> Formatter<'a> {
         }
     }

+    fn fmt_fields<F: core::fmt::Write, T: Poser + Copy>(
+        &mut self,
+        f: &mut F,
+        keyword: &str,
+        trailing_comma: bool,
+        fields: FieldList<T>,
+        fmt: impl Fn(&mut Self, &T, &mut F) -> Result<(), fmt::Error>,
+    ) -> fmt::Result {
+        f.write_str(keyword)?;
+        f.write_str(" {")?;
+        self.fmt_list_low(f, trailing_comma, "}", ",", fields, |s, field, f| {
+            match field {
+                CommentOr::Or(Ok(field)) => fmt(s, field, f)?,
+                CommentOr::Or(Err(scope)) => {
+                    s.fmt_list(f, true, "", "", scope, Self::fmt)?;
+                    return Ok(false);
+                }
+                CommentOr::Comment { literal, .. } => {
+                    f.write_str(literal)?;
+                    f.write_str("\n")?;
+                }
+            }
+            Ok(field.or().is_some())
+        })
+    }
+
     pub fn fmt<F: core::fmt::Write>(&mut self, expr: &Expr, f: &mut F) -> fmt::Result {
         macro_rules! impl_parenter {
             ($($name:ident => $pat:pat,)*) => {
@@ -305,41 +333,25 @@ impl<'a> Formatter<'a> {
                     f.write_str("packed ")?;
                 }

-                f.write_str("struct {")?;
-                self.fmt_list_low(f, trailing_comma, "}", ",", fields, |s, field, f| {
-                    match field {
-                        CommentOr::Or(Ok(StructField { name, ty, .. })) => {
+                self.fmt_fields(
+                    f,
+                    "struct",
+                    trailing_comma,
+                    fields,
+                    |s, StructField { name, ty, .. }, f| {
                         f.write_str(name)?;
                         f.write_str(": ")?;
-                            s.fmt(ty, f)?
-                        }
-                        CommentOr::Or(Err(scope)) => {
+                        s.fmt(ty, f)
+                    },
+                )
-                            s.fmt_list(f, true, "", "", scope, Self::fmt)?;
-                            return Ok(false);
-                        }
-                        CommentOr::Comment { literal, .. } => {
-                            f.write_str(literal)?;
-                            f.write_str("\n")?;
-                        }
-                    }
-                    Ok(field.or().is_some())
-                })
-            }
-            Expr::Enum { variants, trailing_comma, .. } => {
-                f.write_str("enum {")?;
-                self.fmt_list_low(f, trailing_comma, "}", ",", variants, |_, var, f| {
-                    match var {
-                        CommentOr::Or(EnumField { name, .. }) => {
-                            f.write_str(name)?;
-                        }
-                        CommentOr::Comment { literal, .. } => {
-                            f.write_str(literal)?;
-                            f.write_str("\n")?;
-                        }
-                    }
-                    Ok(var.or().is_some())
-                })
             }
+            Expr::Enum { variants, trailing_comma, .. } => self.fmt_fields(
+                f,
+                "enum",
+                trailing_comma,
+                variants,
+                |_, EnumField { name, .. }, f| f.write_str(name),
+            ),
             Expr::Ctor { ty, fields, trailing_comma, .. } => {
                 if let Some(ty) = ty {
                     self.fmt_paren(ty, f, unary)?;
@@ -1,7 +1,8 @@
 use {
     crate::{
+        backend::hbvm::HbvmBackend,
         parser::{Ast, Ctx, FileKind},
-        son::{self, hbvm::HbvmBackend},
+        son::{self},
         ty, FnvBuildHasher,
     },
     alloc::{string::String, vec::Vec},
@@ -1,8 +1,9 @@
 use {
     crate::{
+        backend::hbvm::HbvmBackend,
         lexer::TokenKind,
         parser,
-        son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
+        son::{Codegen, CodegenCtx},
         ty::Module,
     },
     alloc::string::String,
lang/src/lib.rs: 1257 changes (diff suppressed because it is too large)
@@ -369,62 +369,35 @@ impl<'a, 'b> Parser<'a, 'b> {
                 expr
             }
             T::Struct => E::Struct {
+                pos,
                 packed: core::mem::take(&mut self.packed),
-                fields: {
-                    self.ns_bound = self.ctx.idents.len();
-                    self.expect_advance(T::LBrace)?;
-                    self.collect_list(T::Comma, T::RBrace, |s| {
-                        let tok = s.token;
-                        Some(if s.advance_if(T::Comment) {
-                            CommentOr::Comment { literal: s.tok_str(tok), pos: tok.start }
-                        } else if s.lexer.taste().kind == T::Colon {
+                fields: self.collect_fields(&mut must_trail, |s| {
+                    if s.lexer.taste().kind != T::Colon {
+                        return Some(None);
+                    }
                     let name = s.expect_advance(T::Ident)?;
                     s.expect_advance(T::Colon)?;
-                            CommentOr::Or(Ok(StructField {
+                    Some(Some(StructField {
                         pos: name.start,
                         name: s.tok_str(name),
                         ty: s.expr()?,
                     }))
-                        } else {
-                            must_trail = true;
-                            CommentOr::Or(Err(
-                                s.collect_list_low(T::Semi, T::RBrace, true, |s| s.expr_low(true))
-                            ))
-                        })
-                    })
-                },
-                captured: {
-                    self.ns_bound = prev_boundary;
-                    let captured = &mut self.ctx.captured[prev_captured..];
-                    crate::quad_sort(captured, core::cmp::Ord::cmp);
-                    let preserved = captured.partition_dedup().0.len();
-                    self.ctx.captured.truncate(prev_captured + preserved);
-                    self.arena.alloc_slice(&self.ctx.captured[prev_captured..])
-                },
-                pos: {
-                    if self.ns_bound == 0 {
-                        // we might save some memory
-                        self.ctx.captured.clear();
-                    }
-                    pos
-                },
+                })?,
+                captured: self.collect_captures(prev_boundary, prev_captured),
                 trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail,
             },
             T::Enum => E::Enum {
                 pos,
-                variants: {
-                    self.expect_advance(T::LBrace)?;
-                    self.collect_list(T::Comma, T::RBrace, |s| {
-                        let tok = s.token;
-                        Some(if s.advance_if(T::Comment) {
-                            CommentOr::Comment { literal: s.tok_str(tok), pos: tok.start }
-                        } else {
+                variants: self.collect_fields(&mut must_trail, |s| {
+                    if !matches!(s.lexer.taste().kind, T::Comma | T::RBrace) {
+                        return Some(None);
+                    }
                     let name = s.expect_advance(T::Ident)?;
-                            CommentOr::Or(EnumField { pos: name.start, name: s.tok_str(name) })
-                        })
-                    })
-                },
-                trailing_comma: core::mem::take(&mut self.trailing_sep),
+                    Some(Some(EnumField { pos: name.start, name: s.tok_str(name) }))
+                })?,
+                captured: self.collect_captures(prev_boundary, prev_captured),
+                trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail,
             },
             T::Ident | T::CtIdent => {
                 let (id, is_first) = self.resolve_ident(token);
@@ -624,6 +597,45 @@ impl<'a, 'b> Parser<'a, 'b> {
         }
     }

+    fn collect_fields<T: Copy>(
+        &mut self,
+        must_trail: &mut bool,
+        mut parse_field: impl FnMut(&mut Self) -> Option<Option<T>>,
+    ) -> Option<FieldList<'a, T>> {
+        use TokenKind as T;
+        self.ns_bound = self.ctx.idents.len();
+        self.expect_advance(T::LBrace)?;
+        Some(self.collect_list(T::Comma, T::RBrace, |s| {
+            let tok = s.token;
+            Some(if s.advance_if(T::Comment) {
+                CommentOr::Comment { literal: s.tok_str(tok), pos: tok.start }
+            } else if let Some(field) = parse_field(s)? {
+                CommentOr::Or(Ok(field))
+            } else {
+                *must_trail = true;
+                CommentOr::Or(Err(
+                    s.collect_list_low(T::Semi, T::RBrace, true, |s| s.expr_low(true))
+                ))
+            })
+        }))
+    }
+
+    fn collect_captures(&mut self, prev_captured: usize, prev_boundary: usize) -> &'a [Ident] {
+        self.ns_bound = prev_boundary;
+        let captured = &mut self.ctx.captured[prev_captured..];
+        crate::quad_sort(captured, core::cmp::Ord::cmp);
+        let preserved = captured.partition_dedup().0.len();
+        self.ctx.captured.truncate(prev_captured + preserved);
+        let slc = self.arena.alloc_slice(&self.ctx.captured[prev_captured..]);
+
+        if self.ns_bound == 0 {
+            // we might save some memory
+            self.ctx.captured.clear();
+        }
+
+        slc
+    }
+
     fn advance_ident(&mut self) -> Option<Token> {
         let next = self.next();
         if matches!(next.kind, TokenKind::Ident | TokenKind::CtIdent) {
@@ -841,6 +853,8 @@ pub enum Radix {
     Decimal = 10,
 }

+pub type FieldList<'a, T> = &'a [CommentOr<'a, Result<T, &'a [Expr<'a>]>>];
+
 generate_expr! {
     /// `LIST(start, sep, end, elem) => start { elem sep } [elem] end`
     /// `OP := grep for `#define OP:`
@@ -958,7 +972,7 @@ generate_expr! {
     /// `'struct' LIST('{', ',', '}', Ident ':' Expr)`
     Struct {
         pos: Pos,
-        fields: &'a [CommentOr<'a, Result<StructField<'a>, &'a[Self]>>],
+        fields: FieldList<'a, StructField<'a>>,
         captured: &'a [Ident],
         trailing_comma: bool,
         packed: bool,
@@ -966,7 +980,8 @@ generate_expr! {
     /// `'enum' LIST('{', ',', '}', Ident)`
     Enum {
         pos: Pos,
-        variants: &'a [CommentOr<'a, EnumField<'a>>],
+        variants: FieldList<'a, EnumField<'a>>,
+        captured: &'a [Ident],
         trailing_comma: bool,
     },
     /// `[Expr] LIST('.{', ',', '}', Ident [':' Expr])`
lang/src/son.rs: 422 changes
@@ -1,21 +1,25 @@
 use {
-    self::{
-        hbvm::{Comptime, HbvmBackend},
-        strong_ref::StrongRef,
-    },
+    self::strong_ref::StrongRef,
     crate::{
+        backend::{
+            hbvm::{Comptime, HbvmBackend},
+            Backend,
+        },
         ctx_map::CtxEntry,
         debug,
         lexer::{self, TokenKind},
         parser::{
             self,
             idfl::{self},
-            CommentOr, CtorField, Expr, ExprRef, MatchBranch, Pos,
+            CommentOr, CtorField, Expr, ExprRef, FieldList, MatchBranch, Pos,
+        },
+        ty::{
+            self, Arg, ArrayLen, CompState, ConstData, EnumData, EnumField, FTask, FuncData,
+            GlobalData, Loc, Module, Offset, OffsetIter, OptLayout, Sig, StringRef, StructData,
+            StructField, SymKey, Tuple, TypeBase, TypeIns, Types,
         },
-        ty::{self, Arg, ArrayLen, Loc, Module, Tuple},
         utils::{BitSet, Ent, Vc},
-        CompState, Const, Enum, EnumField, FTask, Func, Global, Ident, Offset, OffsetIter,
-        OptLayout, Sig, StringRef, Struct, StructField, SymKey, Types,
+        Ident,
     },
     alloc::{string::String, vec::Vec},
     core::{
@@ -29,52 +33,18 @@ use {
     hbbytecode::DisasmError,
 };

-const VOID: Nid = 0;
-const NEVER: Nid = 1;
-const ENTRY: Nid = 2;
-const MEM: Nid = 3;
-const LOOPS: Nid = 4;
-const ARG_START: usize = 3;
+pub const VOID: Nid = 0;
+pub const NEVER: Nid = 1;
+pub const ENTRY: Nid = 2;
+pub const MEM: Nid = 3;
+pub const LOOPS: Nid = 4;
+pub const ARG_START: usize = 3;
 const DEFAULT_ACLASS: usize = 0;
 const GLOBAL_ACLASS: usize = 1;

-pub mod hbvm;
+pub type Nid = u16;

-type Nid = u16;
 type AClassId = i16;

-pub struct AssemblySpec {
-    entry: u32,
-    code_length: u64,
-    data_length: u64,
-}
-
-pub trait Backend {
-    fn assemble_reachable(
-        &mut self,
-        from: ty::Func,
-        types: &Types,
-        to: &mut Vec<u8>,
-    ) -> AssemblySpec;
-    fn disasm<'a>(
-        &'a self,
-        sluce: &[u8],
-        eca_handler: &mut dyn FnMut(&mut &[u8]),
-        types: &'a Types,
-        files: &'a [parser::Ast],
-        output: &mut String,
-    ) -> Result<(), hbbytecode::DisasmError<'a>>;
-    fn emit_body(&mut self, id: ty::Func, ci: &mut Nodes, tys: &Types, files: &[parser::Ast]);
-
-    fn emit_ct_body(&mut self, id: ty::Func, ci: &mut Nodes, tys: &Types, files: &[parser::Ast]) {
-        self.emit_body(id, ci, tys, files);
-    }
-
-    fn assemble_bin(&mut self, from: ty::Func, types: &Types, to: &mut Vec<u8>) {
-        self.assemble_reachable(from, types, to);
-    }
-}
-
 type Lookup = crate::ctx_map::CtxMap<Nid>;

 impl crate::ctx_map::CtxEntry for Nid {
@@ -124,7 +94,25 @@ impl Default for Nodes {
 }

 impl Nodes {
-    fn loop_depth(&self, target: Nid, scheds: Option<&[Nid]>) -> LoopDepth {
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.values.len()
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.values.is_empty()
+    }
+
+    fn as_ty(&self, cint: Nid) -> ty::Id {
+        debug_assert_eq!(self[cint].ty, ty::Id::TYPE);
+        ty::Id::from(match self[cint].kind {
+            Kind::CInt { value } => value as u64,
+            _ => unreachable!("triing to cast non constant to a type: {:?}", self[cint]),
+        })
+    }
+
+    pub fn loop_depth(&self, target: Nid, scheds: Option<&[Nid]>) -> LoopDepth {
         self[target].loop_depth.set(match self[target].kind {
             Kind::Region | Kind::Entry | Kind::Then | Kind::Else | Kind::Call { .. } | Kind::If => {
                 if self[target].loop_depth.get() != 0 {
@@ -552,7 +540,7 @@ impl Nodes {
         self[to].inputs.push(from);
     }

-    fn use_block(&self, target: Nid, from: Nid, scheds: Option<&[Nid]>) -> Nid {
+    pub fn use_block(&self, target: Nid, from: Nid, scheds: Option<&[Nid]>) -> Nid {
         if self[from].kind != Kind::Phi {
             return self.idom(from, scheds);
         }
@@ -563,7 +551,7 @@ impl Nodes {
         self[self[from].inputs[0]].inputs[index - 1]
     }

-    fn idom(&self, target: Nid, scheds: Option<&[Nid]>) -> Nid {
+    pub fn idom(&self, target: Nid, scheds: Option<&[Nid]>) -> Nid {
         match self[target].kind {
             Kind::Start => unreachable!(),
             Kind::End => unreachable!(),
@@ -839,20 +827,22 @@ impl Nodes {
         Value::new(self.new_node(ty, kind, inps, tys)).ty(ty)
     }

-    fn is_locked(&self, target: Nid) -> bool {
+    // TODO: make this internal to son and force backends to track locks thelself
+
+    pub fn is_locked(&self, target: Nid) -> bool {
         self[target].lock_rc.get() != 0
     }

-    fn is_unlocked(&self, target: Nid) -> bool {
+    pub fn is_unlocked(&self, target: Nid) -> bool {
         self[target].lock_rc.get() == 0
     }

-    fn lock(&self, target: Nid) {
+    pub fn lock(&self, target: Nid) {
         self[target].lock_rc.set(self[target].lock_rc.get() + 1);
     }

     #[track_caller]
-    fn unlock(&self, target: Nid) {
+    pub fn unlock(&self, target: Nid) {
         self[target].lock_rc.set(self[target].lock_rc.get() - 1);
     }

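These lock helpers become `pub` because, as the TODO above notes, backends still maintain the `lock_rc` reference count themselves while they depend on a node. A toy model of that discipline follows; it uses `Cell` for interior mutability the same way `lock_rc` does, but the `Node` struct here is a stand-in for illustration, not the real one.

```rust
use core::cell::Cell;

// Stand-in for the per-node lock counter: peephole passes only remove a node
// whose count is zero, so a backend bumps it while it holds on to the node.
struct Node {
    lock_rc: Cell<u32>,
}

impl Node {
    fn lock(&self) {
        self.lock_rc.set(self.lock_rc.get() + 1);
    }
    fn unlock(&self) {
        self.lock_rc.set(self.lock_rc.get() - 1);
    }
    fn is_unlocked(&self) -> bool {
        self.lock_rc.get() == 0
    }
}

fn main() {
    let node = Node { lock_rc: Cell::new(0) };
    node.lock(); // backend keeps a reference to the node; removal is now forbidden
    assert!(!node.is_unlocked());
    node.unlock(); // every lock must be balanced, or the count underflows
    assert!(node.is_unlocked());
}
```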
@@ -1650,7 +1640,7 @@ impl Nodes {
         }
     }

-    fn is_const(&self, id: Nid) -> bool {
+    pub fn is_const(&self, id: Nid) -> bool {
         matches!(self[id].kind, Kind::CInt { .. })
     }

@@ -2007,7 +1997,7 @@ impl Nodes {
         }
     }

-    fn dominates(&self, dominator: Nid, mut dominated: Nid, scheds: Option<&[Nid]>) -> bool {
+    pub fn dominates(&self, dominator: Nid, mut dominated: Nid, scheds: Option<&[Nid]>) -> bool {
         loop {
             if dominator == dominated {
                 break true;
@@ -2023,7 +2013,7 @@ impl Nodes {
         }
     }

-    fn is_data_dep(&self, val: Nid, user: Nid) -> bool {
+    pub fn is_data_dep(&self, val: Nid, user: Nid) -> bool {
         match self[user].kind {
             Kind::Return { .. } => self[user].inputs[1] == val,
             _ if self.is_cfg(user) && !matches!(self[user].kind, Kind::Call { .. } | Kind::If) => {
@@ -2050,7 +2040,7 @@ impl Nodes {
         }
     }

-    fn this_or_delegates<'a>(&'a self, source: Nid, target: &'a Nid) -> (Nid, &'a [Nid]) {
+    pub fn this_or_delegates<'a>(&'a self, source: Nid, target: &'a Nid) -> (Nid, &'a [Nid]) {
         if self.is_unlocked(*target) {
             (source, core::slice::from_ref(target))
         } else {
@@ -2058,7 +2048,7 @@ impl Nodes {
         }
     }

-    fn is_hard_zero(&self, nid: Nid) -> bool {
+    pub fn is_hard_zero(&self, nid: Nid) -> bool {
         self[nid].kind == Kind::CInt { value: 0 }
             && self[nid].outputs.iter().all(|&n| self[n].kind != Kind::Phi)
     }
@@ -2178,7 +2168,7 @@ impl Kind {
         matches!(self, Self::Arg | Self::Mem | Self::Loops | Self::Entry)
     }

-    fn is_cfg(&self) -> bool {
+    pub fn is_cfg(&self) -> bool {
         matches!(
             self,
             Self::Start
@@ -2199,7 +2189,7 @@ impl Kind {
         matches!(self, Self::Return { .. } | Self::If | Self::End | Self::Die)
     }

-    fn starts_basic_block(&self) -> bool {
+    pub fn starts_basic_block(&self) -> bool {
         matches!(self, Self::Region | Self::Loop | Self::Start | Kind::Then | Kind::Else)
     }

@@ -2224,13 +2214,14 @@ impl fmt::Display for Kind {

 #[derive(Debug, Default, Clone)]
 pub struct Node {
-    kind: Kind,
-    inputs: Vc,
-    outputs: Vc,
-    peep_triggers: Vc,
-    clobbers: BitSet,
-    ty: ty::Id,
-    pos: Pos,
+    pub kind: Kind,
+    pub inputs: Vc,
+    pub outputs: Vc,
+    pub peep_triggers: Vc,
+    pub clobbers: BitSet,
+    pub ty: ty::Id,
+    pub pos: Pos,
+
     depth: Cell<IDomDepth>,
     lock_rc: Cell<LockRc>,
     loop_depth: Cell<LoopDepth>,
@@ -2260,11 +2251,11 @@ impl Node {
         matches!(self.kind, Kind::Stre | Kind::Load | Kind::Stck)
     }

-    fn is_data_phi(&self) -> bool {
+    pub fn is_data_phi(&self) -> bool {
         self.kind == Kind::Phi && self.ty != ty::Id::VOID
     }

-    fn has_no_value(&self) -> bool {
+    pub fn has_no_value(&self) -> bool {
         (self.kind.is_cfg() && (!self.kind.is_call() || self.ty == ty::Id::VOID))
             || matches!(self.kind, Kind::Stre)
     }
@@ -2712,7 +2703,7 @@ impl<'a> Codegen<'a> {

     pub fn push_embeds(&mut self, embeds: Vec<Vec<u8>>) {
         for data in embeds {
-            let g = Global {
+            let g = GlobalData {
                 ty: self.tys.make_array(ty::Id::U8, data.len() as _),
                 data,
                 ..Default::default()
@@ -2740,13 +2731,13 @@ impl<'a> Codegen<'a> {
             return 1;
         }

-        let fuc = self.tys.ins.funcs.push(Func {
+        let fuc = self.tys.ins.funcs.push(FuncData {
             file,
             sig: Some(Sig { args: Tuple::empty(), ret }),
             ..Default::default()
         });

-        self.ct_backend.emit_ct_body(fuc, &mut self.ci.nodes, self.tys, self.files);
+        self.ct_backend.emit_ct_body(fuc, &self.ci.nodes, self.tys, self.files);

         // TODO: return them back

@@ -2948,7 +2939,7 @@ impl<'a> Codegen<'a> {
         let literal = &literal[1..literal.len() - 1];

         let report = |bytes: &core::str::Bytes, message: &str| {
-            self.error(pos + (literal.len() - bytes.len()) as u32 - 1, message)
+            self.error(pos + (literal.len() - bytes.len()) as u32 - 1, message);
         };

         let mut data = Vec::<u8>::with_capacity(literal.len());
@@ -2960,8 +2951,11 @@ impl<'a> Codegen<'a> {
                 occupied_entry.get_key_value().0.value.0
             }
             (hash_map::RawEntryMut::Vacant(vacant_entry), hash) => {
-                let global =
-                    self.tys.ins.globals.push(Global { data, ty, ..Default::default() });
+                let global = self.tys.ins.globals.push(GlobalData {
+                    data,
+                    ty,
+                    ..Default::default()
+                });
                 vacant_entry
                     .insert(crate::ctx_map::Key { value: StringRef(global), hash }, ())
                     .0
@@ -4217,20 +4211,13 @@ impl<'a> Codegen<'a> {
         let tty = vtarget.ty;

         match self.tys.base_of(tty).unwrap_or(tty).expand() {
-            ty::Kind::Module(m) => {
-                match self.find_type(pos, self.ci.file, m, self.ci.parent, Err(name)).expand() {
-                    ty::Kind::NEVER => Value::NEVER,
-                    ty::Kind::Global(global) => self.gen_global(global),
-                    ty::Kind::Const(cnst) => self.gen_const(cnst, ctx),
-                    v => Some(self.ci.nodes.new_const_lit(ty::Id::TYPE, v.compress())),
-                }
-            }
+            ty::Kind::Module(m) => self.find_type_as_value(pos, m, m, Err(name), ctx),
             ty::Kind::Enum(e) => {
                 let intrnd = self.tys.names.project(name);
                 self.gen_enum_variant(pos, e, intrnd)
             }
             ty::Kind::Struct(s) => {
-                let Struct { ast, file, .. } = self.tys.ins.structs[s];
+                let TypeBase { ast, file, .. } = *self.tys.ins.structs[s];
                 if let Some((offset, ty)) = OffsetIter::offset_of(self.tys, s, name) {
                     Some(Value::ptr(self.offset(vtarget.id, offset)).ty(ty))
                 } else if let Expr::Struct { fields: [.., CommentOr::Or(Err(_))], .. } =
@@ -4258,51 +4245,39 @@ impl<'a> Codegen<'a> {
                     Value::NEVER
                 }
             }
-            ty::Kind::TYPE => match ty::Id::from(match self.ci.nodes[vtarget.id].kind {
-                Kind::CInt { value } => value as u64,
-                _ => unreachable!(),
-            })
-            .expand()
-            {
+            ty::Kind::TYPE => match self.ci.nodes.as_ty(vtarget.id).expand() {
                 ty::Kind::Struct(s) => {
-                    let Struct { file, .. } = self.tys.ins.structs[s];
-                    match self.find_type(pos, self.ci.file, file, s.into(), Err(name)).expand() {
-                        ty::Kind::NEVER => Value::NEVER,
-                        ty::Kind::Global(global) => self.gen_global(global),
-                        ty::Kind::Const(cnst) => self.gen_const(cnst, ctx),
-                        v => Some(self.ci.nodes.new_const_lit(ty::Id::TYPE, v.compress())),
+                    let TypeBase { file, .. } = *self.tys.ins.structs[s];
+                    self.find_type_as_value(pos, file, s, Err(name), ctx)
+                }
+                ty::Kind::Module(m) => self.find_type_as_value(pos, m, m, Err(name), ctx),
+                ty::Kind::Enum(e) => {
+                    let intrnd = self.tys.names.project(name);
+                    if let Some(index) =
+                        self.tys.enum_fields(e).iter().position(|f| Some(f.name) == intrnd)
+                    {
+                        Some(self.ci.nodes.new_const_lit(e.into(), index as i64))
+                    } else {
+                        let TypeBase { file, .. } = *self.tys.ins.enums[e];
+                        self.find_type_as_value(pos, file, e, Err(name), ctx)
                     }
                 }
-                ty::Kind::Module(m) => {
-                    match self.find_type(pos, self.ci.file, m, m.into(), Err(name)).expand() {
-                        ty::Kind::NEVER => Value::NEVER,
-                        ty::Kind::Global(global) => self.gen_global(global),
-                        ty::Kind::Const(cnst) => self.gen_const(cnst, ctx),
-                        v => Some(self.ci.nodes.new_const_lit(ty::Id::TYPE, v.compress())),
-                    }
-                }
-                ty => {
-                    self.error(
+                ty => self.error(
                     pos,
                     fa!(
                         "accesing scope on '{}' is not supported yet",
                         self.ty_display(ty.compress())
                     ),
-                    );
-                    Value::NEVER
-                }
+                ),
             },
-            _ => {
-                self.error(
+            _ => self.error(
                 pos,
                 fa!(
                     "the '{}' is not a struct, or pointer to one, or enum, \
                     fo field access does not make sense",
                     self.ty_display(tty)
                 ),
-                );
-                Value::NEVER
-            }
+            ),
         }
         .map(Ok)
     }
@@ -4385,13 +4360,7 @@ impl<'a> Codegen<'a> {
             match self.gen_field(Ctx::default(), target, pos, name)? {
                 Ok(mut fexpr) => {
                     self.assert_ty(func.pos(), &mut fexpr, ty::Id::TYPE, "function");
-                    (
-                        ty::Id::from(match self.ci.nodes[fexpr.id].kind {
-                            Kind::CInt { value } => value as u64,
-                            _ => unreachable!(),
-                        }),
-                        None,
-                    )
+                    (self.ci.nodes.as_ty(fexpr.id), None)
                 }
                 Err((ty, val)) => (ty, Some(val)),
             }
@@ -4410,7 +4379,7 @@ impl<'a> Codegen<'a> {

         inline |= sig.ret == ty::Id::TYPE;

-        let Func { expr, file, is_inline, parent, .. } = self.tys.ins.funcs[fu];
+        let FuncData { expr, file, is_inline, parent, .. } = self.tys.ins.funcs[fu];
         let ast = &self.files[file.index()];
         let &Expr::Closure { args: cargs, body, .. } = expr.get(ast) else { unreachable!() };

@@ -4700,7 +4669,9 @@ impl<'a> Codegen<'a> {
                 );
             }
         }
-        _ => self.error(pos, fa!("'{0} {op} {0}' is not supported", self.ty_display(ty))),
+        _ => {
+            _ = self.error(pos, fa!("'{0} {op} {0}' is not supported", self.ty_display(ty)))
+        }
     }
 }

@@ -4769,7 +4740,7 @@ impl<'a> Codegen<'a> {
     }

     fn compute_signature(&mut self, func: &mut ty::Func, pos: Pos, args: &[Expr]) -> Option<Sig> {
-        let Func { file, expr, sig, parent, .. } = self.tys.ins.funcs[*func];
+        let FuncData { file, expr, sig, parent, .. } = self.tys.ins.funcs[*func];
         let fast = &self.files[file.index()];
         let &Expr::Closure { args: cargs, ret, .. } = expr.get(fast) else {
             unreachable!();
@@ -4825,11 +4796,11 @@ impl<'a> Codegen<'a> {
         self.ci.scope.vars.drain(base..).for_each(|v| v.remove(&mut self.ci.nodes));

         let sym = SymKey::FuncInst(*func, args);
-        let ct = |ins: &mut crate::TypeIns| {
+        let ct = |ins: &mut TypeIns| {
             let fuc = ins.funcs[*func];
             debug_assert!(fuc.comp_state.iter().all(|&s| s == CompState::default()));
             ins.funcs
-                .push(Func { base: Some(*func), sig: Some(Sig { args, ret }), ..fuc })
+                .push(FuncData { base: Some(*func), sig: Some(Sig { args, ret }), ..fuc })
                 .into()
         };
         let ty::Kind::Func(f) =
@@ -5078,7 +5049,7 @@ impl<'a> Codegen<'a> {

         if self.finalize(prev_err_len) {
             let backend = if !cct { &mut *self.backend } else { &mut *self.ct_backend };
-            backend.emit_body(id, &mut self.ci.nodes, self.tys, self.files);
+            backend.emit_body(id, &self.ci.nodes, self.tys, self.files);
         }

         self.ci.pos.pop();
@@ -5396,9 +5367,10 @@ impl<'a> Codegen<'a> {
     }

     #[track_caller]
-    fn error(&self, pos: Pos, msg: impl core::fmt::Display) {
+    fn error(&self, pos: Pos, msg: impl core::fmt::Display) -> Option<Value> {
         let mut buf = self.errors.borrow_mut();
         write!(buf, "{}", self.file().report(pos, msg)).unwrap();
+        Value::NEVER
     }

     #[track_caller]
@@ -5419,10 +5391,7 @@ impl<'a> Codegen<'a> {
             .vars
             .iter()
             .filter(|v| v.ty == ty::Id::TYPE)
-            .map(|v| match self.ci.nodes[v.value.get()].kind {
-                Kind::CInt { value } => (value, v.id),
-                _ => unreachable!(),
-            })
+            .map(|v| (self.ci.nodes.as_ty(v.value()), v.id))
             .collect::<Vec<_>>();
         self.pool.push_ci(file, self.ci.parent, Some(ret), self.tys.tasks.len(), &mut self.ci);
         self.ci.scope.vars = scope
@@ -5475,7 +5444,7 @@ impl<'a> Codegen<'a> {
     fn eval_global(&mut self, file: Module, name: Ident, expr: &Expr) -> ty::Id {
         self.ct.activate();

-        let gid = self.tys.ins.globals.push(Global { file, name, ..Default::default() });
+        let gid = self.tys.ins.globals.push(GlobalData { file, name, ..Default::default() });

         self.pool.push_ci(file, self.ci.parent, None, self.tys.tasks.len(), &mut self.ci);
         let prev_err_len = self.errors.borrow().len();
@@ -5509,12 +5478,12 @@ impl<'a> Codegen<'a> {
     }

     fn find_local_ty(&mut self, ident: Ident) -> Option<ty::Id> {
-        self.ci.scope.vars.iter().rfind(|v| (v.id == ident && v.ty == ty::Id::TYPE)).map(|v| {
-            match self.ci.nodes[v.value.get()].kind {
-                Kind::CInt { value } => ty::Id::from(value as u64),
-                k => unreachable!("{k:?}"),
-            }
-        })
+        self.ci
+            .scope
+            .vars
+            .iter()
+            .rfind(|v| (v.id == ident && v.ty == ty::Id::TYPE))
+            .map(|v| self.ci.nodes.as_ty(v.value()))
     }

     fn find_type_in_file(&mut self, pos: Pos, file: Module, id: Result<Ident, &str>) -> ty::Id {
@@ -5525,6 +5494,22 @@ impl<'a> Codegen<'a> {
         self.find_type(pos, self.ci.file, self.ci.file, self.ci.parent, id)
     }

+    fn find_type_as_value(
+        &mut self,
+        pos: Pos,
+        file: Module,
+        parent: impl Into<ty::Id>,
+        id: Result<Ident, &str>,
+        ctx: Ctx,
+    ) -> Option<Value> {
+        match self.find_type(pos, self.ci.file, file, parent.into(), id).expand() {
+            ty::Kind::NEVER => Value::NEVER,
+            ty::Kind::Global(global) => self.gen_global(global),
+            ty::Kind::Const(cnst) => self.gen_const(cnst, ctx),
+            v => Some(self.ci.nodes.new_const_lit(ty::Id::TYPE, v.compress())),
+        }
+    }
+
     fn find_type(
         &mut self,
         pos: Pos,
@@ -5548,7 +5533,7 @@ impl<'a> Codegen<'a> {
         let mut piter = parent;
         let Some((expr @ Expr::BinOp { left, right, .. }, name)) = (loop {
             if let Some(f) =
-                parser::find_decl(self.tys.scope_of(piter, f).unwrap_or(f.exprs()), &f.file, id)
+                parser::find_decl(self.tys.scope_of(piter, f).expect("TODO"), &f.file, id)
             {
                 break Some(f);
             }
@@ -5556,7 +5541,6 @@ impl<'a> Codegen<'a> {
             if let Some((captures, capture_tuple)) = self.tys.captures_of(piter, f)
                 && let Some(idx) = captures.iter().position(|&cid| Ok(cid) == id)
             {
-                debug_assert_eq!(captures.len(), capture_tuple.len());
                 return self.tys.ins.args[capture_tuple.range().start + idx];
             }

@@ -5605,7 +5589,7 @@ impl<'a> Codegen<'a> {
                 self.tys
                     .ins
                     .consts
-                    .push(Const { ast: ExprRef::new(expr), name, file, parent })
+                    .push(ConstData { ast: ExprRef::new(expr), name, file, parent })
                     .into()
             } else {
                 self.parse_ty(
@@ -5687,78 +5671,37 @@ impl<'a> Codegen<'a> {
                 .map_or(ArrayLen::MAX, |expr| self.eval_const(sc.file, expr, ty::Id::U32) as _);
                 self.tys.make_array(ty, len)
             }
-            Expr::Struct { pos, fields, packed, captured, .. } => {
-                let captures_start = self.tys.tmp.args.len();
-                for &cp in captured {
-                    let ty = self.find_local_ty(cp).expect("TODO");
-                    self.tys.tmp.args.push(ty);
-                }
-                let captured = self.tys.pack_args(captures_start).expect("TODO");
-
-                let sym = SymKey::Struct(sc.file, pos, captured);
-                if let Some(&ty) = self.tys.syms.get(sym, &self.tys.ins) {
-                    return ty;
-                }
-
-                let prev_tmp = self.tys.tmp.struct_fields.len();
-                for field in fields.iter().filter_map(CommentOr::or).filter_map(Result::ok) {
-                    let ty = self.parse_ty(sc.anon(), &field.ty);
-                    let field = StructField { name: self.tys.names.intern(field.name), ty };
-                    self.tys.tmp.struct_fields.push(field);
-                }
-
-                let ty = self
-                    .tys
-                    .ins
-                    .structs
-                    .push(Struct {
-                        file: sc.file,
+            Expr::Struct { pos, fields, packed, captured, .. } => self.parse_base_ty(
                 pos,
+                expr,
                 captured,
-                        name: sc.name.unwrap_or_default(),
-                        field_start: self.tys.ins.struct_fields.len() as _,
+                fields,
+                sc,
+                |s| [&mut s.ins.struct_fields, &mut s.tmp.struct_fields],
+                |s, field| {
+                    let ty = s.parse_ty(sc.anon(), &field.ty);
+                    StructField { name: s.tys.names.intern(field.name), ty }
+                },
+                |s, base| {
+                    s.ins.structs.push(StructData {
+                        base,
                         explicit_alignment: packed.then_some(1),
-                        ast: ExprRef::new(expr),
                         ..Default::default()
                     })
-                    .into();
+                },
+            ),
-                self.tys.ins.struct_fields.extend(self.tys.tmp.struct_fields.drain(prev_tmp..));
-
-                self.tys.syms.insert(sym, ty, &self.tys.ins);
-                ty
-            }
-            Expr::Enum { pos, variants, .. } => {
-                let sym = SymKey::Enum(sc.file, pos);
-                if let Some(&ty) = self.tys.syms.get(sym, &self.tys.ins) {
-                    return ty;
-                }
-
-                let prev_tmp = self.tys.tmp.enum_fields.len();
-                for field in variants.iter().filter_map(CommentOr::or) {
-                    let field = EnumField { name: self.tys.names.intern(field.name) };
-                    self.tys.tmp.enum_fields.push(field);
-                }
-
-                let ty = self
-                    .tys
-                    .ins
-                    .enums
-                    .push(Enum {
-                        file: sc.file,
+            Expr::Enum { pos, variants, captured, .. } => self.parse_base_ty(
                 pos,
-                        name: sc.name.unwrap_or_default(),
-                        field_start: self.tys.ins.enum_fields.len() as _,
-                    })
-                    .into();
+                expr,
+                captured,
+                variants,
+                sc,
+                |s| [&mut s.ins.enum_fields, &mut s.tmp.enum_fields],
-                self.tys.ins.enum_fields.extend(self.tys.tmp.enum_fields.drain(prev_tmp..));
+                |s, field| EnumField { name: s.tys.names.intern(field.name) },
+                |s, base| s.ins.enums.push(EnumData { base }),
-                self.tys.syms.insert(sym, ty, &self.tys.ins);
-                ty
-            }
+            ),
             Expr::Closure { pos, args, ret, .. } if let Some(name) = sc.name => {
-                let func = Func {
+                let func = FuncData {
                     file: sc.file,
                     parent: sc.parent,
                     name,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[expect(clippy::too_many_arguments)]
|
||||||
|
fn parse_base_ty<A: Copy, F, T: Into<ty::Id>>(
|
||||||
|
&mut self,
|
||||||
|
pos: Pos,
|
||||||
|
expr: &Expr,
|
||||||
|
captured: &[Ident],
|
||||||
|
fields: FieldList<A>,
|
||||||
|
sc: TyScope,
|
||||||
|
get_fields: impl Fn(&mut Types) -> [&mut Vec<F>; 2],
|
||||||
|
check_field: impl Fn(&mut Self, A) -> F,
|
||||||
|
check: impl Fn(&mut Types, TypeBase) -> T,
|
||||||
|
) -> ty::Id {
|
||||||
|
let captures_start = self.tys.tmp.args.len();
|
||||||
|
for &cp in captured {
|
||||||
|
let ty = self.find_local_ty(cp).expect("TODO");
|
||||||
|
self.tys.tmp.args.push(ty);
|
||||||
|
}
|
||||||
|
let captured = self.tys.pack_args(captures_start).expect("TODO");
|
||||||
|
|
||||||
|
let sym = SymKey::Type(sc.file, pos, captured);
|
||||||
|
if let Some(&ty) = self.tys.syms.get(sym, &self.tys.ins) {
|
||||||
|
return ty;
|
||||||
|
}
|
||||||
|
|
||||||
|
let prev_tmp = get_fields(self.tys)[1].len();
|
||||||
|
for field in fields.iter().filter_map(CommentOr::or).filter_map(Result::ok) {
|
||||||
|
let field = check_field(self, field);
|
||||||
|
get_fields(self.tys)[1].push(field);
|
||||||
|
}
|
||||||
|
|
||||||
|
let base = TypeBase {
|
||||||
|
file: sc.file,
|
||||||
|
parent: sc.parent,
|
||||||
|
pos,
|
||||||
|
captured,
|
||||||
|
name: sc.name.unwrap_or_default(),
|
||||||
|
field_start: self.tys.ins.struct_fields.len() as _,
|
||||||
|
ast: ExprRef::new(expr),
|
||||||
|
};
|
||||||
|
|
||||||
|
let [ins, tmp] = get_fields(self.tys);
|
||||||
|
ins.extend(tmp.drain(prev_tmp..));
|
||||||
|
|
||||||
|
let ty = check(self.tys, base).into();
|
||||||
|
|
||||||
|
self.tys.syms.insert(sym, ty, &self.tys.ins);
|
||||||
|
|
||||||
|
ty
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
#[derive(Clone, Copy)]
|
||||||
|
@@ -5820,8 +5813,11 @@ impl TyScope {
 #[cfg(test)]
 mod tests {
     use {
-        super::{hbvm::HbvmBackend, CodegenCtx},
-        crate::ty,
+        crate::{
+            backend::hbvm::{self, HbvmBackend},
+            son::CodegenCtx,
+            ty,
+        },
         alloc::{string::String, vec::Vec},
         core::fmt::Write,
     };
@@ -5856,7 +5852,7 @@ mod tests {
         } else {
             log::info!("================ running {ident} ==============");
             log::trace!("{output}");
-            super::hbvm::test_run_vm(&out, output);
+            hbvm::test_run_vm(&out, output);
         }
     }

lang/src/ty.rs: 1077 lines (new file; diff suppressed because it is too large)
@@ -14,9 +14,9 @@ main:
 CP r35, r0
 LI64 r36, 30d
 LI64 r37, 100d
+CP r34, r35
 CP r32, r35
 CP r33, r35
-CP r34, r35
 5: JLTU r34, r36, :0
 ADDI64 r32, r32, 1d
 CP r2, r35