forked from AbleOS/holey-bytes
Commit 517850f283 (parent faa8dd2e6f): fixing undescriptive error or not enough arguments
@@ -19,7 +19,7 @@ unsafe extern "C" fn fmt() {
let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);
let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
let mut ctx = parser::ParserCtx::default();
let mut ctx = parser::Ctx::default();
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);
let mut f = wasm_rt::Write(&mut OUTPUT[..]);
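A quick back-of-the-envelope on the capacity requested above (SOURCE_TO_AST_FACTOR itself is defined later in this diff, in parser.rs): on a 64-bit target the formatter reserves 15 arena bytes per byte of source, so a hypothetical 4 KiB input pre-allocates roughly 60 KiB of AST arena.

// Worked numbers for Arena::with_capacity(code.len() * SOURCE_TO_AST_FACTOR);
// SOURCE_TO_AST_FACTOR = 7 * (size_of::<usize>() / 4) + 1, i.e. 7 * 2 + 1 on 64-bit targets.
const FACTOR_64: usize = 7 * (8 / 4) + 1; // = 15
const SRC_LEN: usize = 4 * 1024; // hypothetical 4 KiB source file
const ARENA_BYTES: usize = SRC_LEN * FACTOR_64; // = 61_440 bytes reserved up front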
@@ -53,7 +53,7 @@ unsafe fn compile_and_run(mut fuel: usize) {
};

let files = {
let mut ctx = hblang::parser::ParserCtx::default();
let mut ctx = hblang::parser::Ctx::default();
let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
let mut loader = |path: &str, _: &str, kind| match kind {
hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap() as FileId),
@@ -728,7 +728,9 @@ sqrt := fn(x: uint): uint {
g := 0
b := 32768
bshift := 15
loop if b == 0 break else {
loop if b == 0 {
break
} else {
bshift -= 1
temp = b + (g << 1)
temp <<= bshift
@@ -2649,13 +2649,21 @@ impl Codegen {
#[cfg(test)]
mod tests {
use alloc::{string::String, vec::Vec};
use {
crate::parser,
alloc::{string::String, vec::Vec},
};

fn generate(ident: &'static str, input: &'static str, output: &mut String) {
_ = log::set_logger(&crate::fs::Logger);
log::set_max_level(log::LevelFilter::Debug);

let (files, embeds) = crate::test_parse_files(ident, input);
let mut ctx = parser::Ctx::default();
let (files, embeds) = crate::test_parse_files(ident, input, &mut ctx);
if !ctx.errors.get_mut().is_empty() {
output.push_str(ctx.errors.get_mut());
return;
}
let mut codegen = super::Codegen { files, ..Default::default() };
codegen.push_embeds(embeds);
@@ -451,7 +451,7 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
#[cfg(test)]
pub mod test {
use {
crate::parser::{self, ParserCtx},
crate::parser::{self, Ctx},
alloc::borrow::ToOwned,
std::{fmt::Write, string::String},
};

@@ -461,8 +461,7 @@ pub mod test {
let len = crate::fmt::minify(&mut minned);
minned.truncate(len);

let ast =
parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut parser::no_loader);
let ast = parser::Ast::new(ident, minned, &mut Ctx::default(), &mut parser::no_loader);
//log::error!(
// "{} / {} = {} | {} / {} = {}",
// ast.mem.size(),
@@ -1,7 +1,7 @@
use {
crate::{
codegen,
parser::{self, Ast, FileKind, ParserCtx},
parser::{self, Ast, Ctx, FileKind},
son,
},
alloc::{string::String, vec::Vec},
@@ -89,10 +89,11 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
let ast = parsed.ast.into_iter().next().unwrap();
write!(out, "{ast}").unwrap();
} else if options.optimize {
let mut codegen = son::Codegen::default();
codegen.files = &parsed.ast;
codegen.push_embeds(parsed.embeds);
let mut ctx = crate::son::CodegenCtx::default();
*ctx.parser.errors.get_mut() = parsed.errors;
let mut codegen = son::Codegen::new(&parsed.ast, &mut ctx);

codegen.push_embeds(parsed.embeds);
codegen.generate(0);

if !codegen.errors.borrow().is_empty() {

@@ -108,6 +109,11 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
codegen.assemble(out);
}
} else {
if !parsed.errors.is_empty() {
log::error!("{}", parsed.errors);
return Err(std::io::Error::other("parsing failed"));
}

let mut codegen = codegen::Codegen::default();
codegen.files = parsed.ast;
codegen.push_embeds(parsed.embeds);
@@ -213,6 +219,7 @@ impl<T> TaskQueueInner<T> {
pub struct Loaded {
ast: Vec<Ast>,
embeds: Vec<Vec<u8>>,
errors: String,
}

pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {

@@ -334,7 +341,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
};

let thread = || {
let mut ctx = ParserCtx::default();
let mut ctx = Ctx::default();
let mut tmp = PathBuf::new();
while let Some(task @ (indx, ..)) = tasks.pop() {
let res = execute_task(&mut ctx, task, &mut tmp);

@@ -343,6 +350,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
ast.resize_with(len, || Err(io::ErrorKind::InvalidData.into()));
ast[indx as usize] = res;
}
ctx.errors.into_inner()
};

let path = Path::new(root).canonicalize().map_err(|e| {
@@ -351,15 +359,23 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
seen_modules.lock().unwrap().insert(path.clone(), 0);
tasks.push((0, path));

if extra_threads == 0 {
thread();
let errors = if extra_threads == 0 {
thread()
} else {
std::thread::scope(|s| (0..extra_threads + 1).for_each(|_| _ = s.spawn(thread)));
}
std::thread::scope(|s| {
(0..extra_threads + 1)
.map(|_| s.spawn(thread))
.collect::<Vec<_>>()
.into_iter()
.map(|t| t.join().unwrap())
.collect::<String>()
})
};

Ok(Loaded {
ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
embeds: embeds.into_inner().unwrap(),
errors,
})
}
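The hunk above turns the worker closure into one that returns the error text it accumulated, and the multi-threaded branch now keeps the join handles so the per-thread strings can be concatenated into Loaded::errors. A minimal self-contained sketch of just that plumbing (the real closure also pops parse tasks and fills the AST table):

// Hedged sketch: `work` stands in for the per-thread parsing closure.
fn collect_errors(extra_threads: usize, work: impl Fn() -> String + Sync) -> String {
    if extra_threads == 0 {
        work()
    } else {
        std::thread::scope(|s| {
            (0..extra_threads + 1)
                .map(|_| s.spawn(&work))
                .collect::<Vec<_>>() // spawn every worker before joining any of them
                .into_iter()
                .map(|handle| handle.join().unwrap())
                .collect::<String>()
        })
    }
}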
@@ -51,6 +51,8 @@ macro_rules! gen_token_kind {
}

impl $name {
pub const OPS: &[Self] = &[$($(Self::$op),*),*];

pub fn name(&self) -> &str {
let sf = unsafe { &*(self as *const _ as *const u8) } ;
match *self {
@@ -279,11 +281,16 @@ impl TokenKind {
Self::Shl => a.wrapping_shl(b as _),
Self::Eq => (a == b) as i64,
Self::Ne => (a != b) as i64,
Self::Lt => (a < b) as i64,
Self::Gt => (a > b) as i64,
Self::Le => (a >= b) as i64,
Self::Ge => (a <= b) as i64,
Self::Band => a & b,
Self::Bor => a | b,
Self::Xor => a ^ b,
Self::Mod => a % b,
Self::Shr => a >> b,
Self::Mod if b == 0 => 0,
Self::Mod => a.wrapping_rem(b),
Self::Shr => a.wrapping_shr(b as _),
s => todo!("{s}"),
}
}
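The replaced arms exist because constant folding runs over arbitrary user input: `%` panics on b == 0 and on i64::MIN % -1 regardless of build profile, and `>>` with an oversized count panics in debug builds. A tiny standalone illustration of the guarded, wrapping variants (the operator tag is a stand-in, not the real TokenKind):

fn fold(op: &str, a: i64, b: i64) -> i64 {
    match op {
        "%" if b == 0 => 0, // the zero case is reported elsewhere; folding stays total
        "%" => a.wrapping_rem(b), // also covers i64::MIN % -1
        ">>" => a.wrapping_shr(b as u32), // shift count taken modulo 64
        _ => unreachable!("only % and >> are sketched here"),
    }
}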
@@ -316,6 +323,17 @@ impl TokenKind {
s => todo!("{s}"),
}
}

pub fn closing(&self) -> Option<TokenKind> {
Some(match self {
Self::Ctor => Self::RBrace,
Self::Tupl => Self::RParen,
Self::LParen => Self::RParen,
Self::LBrack => Self::RBrack,
Self::LBrace => Self::RBrace,
_ => return None,
})
}
}

gen_token_kind! {
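The new `closing` helper maps every opening token (including the `Ctor` and `Tupl` sugar) to the token that terminates it; the list-recovery loop added further down in this diff uses it to tell real list delimiters apart from ones nested inside brackets. The same shape of helper in miniature, on plain chars instead of TokenKind:

fn closing(open: char) -> Option<char> {
    match open {
        '(' => Some(')'),
        '[' => Some(']'),
        '{' => Some('}'),
        _ => None,
    }
}

fn balanced(src: &str) -> bool {
    let mut stack = Vec::new();
    for c in src.chars() {
        if closing(c).is_some() {
            stack.push(c); // opener: remember what has to close it
        } else if stack.last().and_then(|&o| closing(o)) == Some(c) {
            stack.pop(); // matching closer
        }
    }
    stack.is_empty()
}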
@@ -930,6 +930,11 @@ impl IdentInterner {
fn project(&self, ident: &str) -> Option<Ident> {
self.lookup.get(ident, &self.strings).copied()
}

fn clear(&mut self) {
self.lookup.clear();
self.strings.clear()
}
}

#[derive(Default)]
@ -946,20 +951,36 @@ pub struct TypeIns {
|
|||
funcs: Vec<Func>,
|
||||
args: Vec<ty::Id>,
|
||||
globals: Vec<Global>,
|
||||
// TODO: use ctx map
|
||||
strings: HashMap<Vec<u8>, ty::Global>,
|
||||
structs: Vec<Struct>,
|
||||
fields: Vec<Field>,
|
||||
ptrs: Vec<Ptr>,
|
||||
slices: Vec<Array>,
|
||||
}
|
||||
|
||||
struct FTask {
|
||||
file: FileId,
|
||||
id: ty::Func,
|
||||
}
|
||||
|
||||
struct StringRef(ty::Global);
|
||||
|
||||
impl ctx_map::CtxEntry for StringRef {
|
||||
type Ctx = [Global];
|
||||
type Key<'a> = &'a [u8];
|
||||
|
||||
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
|
||||
&ctx[self.0 as usize].data
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Types {
|
||||
syms: ctx_map::CtxMap<ty::Id>,
|
||||
names: IdentInterner,
|
||||
strings: ctx_map::CtxMap<StringRef>,
|
||||
ins: TypeIns,
|
||||
tmp: TypesTmp,
|
||||
tasks: Vec<Option<FTask>>,
|
||||
}
|
||||
|
||||
const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
|
||||
|
@ -1444,6 +1465,28 @@ impl Types {
|
|||
let name = self.names.project(name)?;
|
||||
self.struct_fields(s).iter().position(|f| f.name == name)
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
self.syms.clear();
|
||||
self.names.clear();
|
||||
self.strings.clear();
|
||||
|
||||
self.ins.funcs.clear();
|
||||
self.ins.args.clear();
|
||||
self.ins.globals.clear();
|
||||
self.ins.structs.clear();
|
||||
self.ins.fields.clear();
|
||||
self.ins.ptrs.clear();
|
||||
self.ins.slices.clear();
|
||||
|
||||
debug_assert_eq!(self.tmp.fields.len(), 0);
|
||||
debug_assert_eq!(self.tmp.frontier.len(), 0);
|
||||
debug_assert_eq!(self.tmp.globals.len(), 0);
|
||||
debug_assert_eq!(self.tmp.funcs.len(), 0);
|
||||
debug_assert_eq!(self.tmp.args.len(), 0);
|
||||
|
||||
debug_assert_eq!(self.tasks.len(), 0);
|
||||
}
|
||||
}
|
||||
|
||||
struct OffsetIter {
|
||||
|
@ -1559,6 +1602,10 @@ impl Comptime {
|
|||
fn pop_pc(&mut self, prev_pc: hbvm::mem::Address) {
|
||||
self.vm.pc = prev_pc + self.code.as_ptr() as usize;
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
self.code.clear();
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Comptime {
|
||||
|
@ -1641,13 +1688,17 @@ pub fn run_test(
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn test_parse_files(ident: &'static str, input: &'static str) -> (Vec<parser::Ast>, Vec<Vec<u8>>) {
|
||||
fn test_parse_files(
|
||||
ident: &str,
|
||||
input: &str,
|
||||
ctx: &mut parser::Ctx,
|
||||
) -> (Vec<parser::Ast>, Vec<Vec<u8>>) {
|
||||
use {
|
||||
self::parser::FileKind,
|
||||
std::{borrow::ToOwned, string::ToString},
|
||||
};
|
||||
|
||||
fn find_block(mut input: &'static str, test_name: &'static str) -> &'static str {
|
||||
fn find_block<'a>(mut input: &'a str, test_name: &str) -> &'a str {
|
||||
const CASE_PREFIX: &str = "#### ";
|
||||
const CASE_SUFFIX: &str = "\n```hb";
|
||||
loop {
|
||||
|
@ -1707,13 +1758,10 @@ fn test_parse_files(ident: &'static str, input: &'static str) -> (Vec<parser::As
|
|||
.ok_or("Embed Not Found".to_string()),
|
||||
};
|
||||
|
||||
let mut ctx = parser::ParserCtx::default();
|
||||
(
|
||||
module_map
|
||||
.iter()
|
||||
.map(|&(path, content)| {
|
||||
parser::Ast::new(path, content.to_owned(), &mut ctx, &mut loader)
|
||||
})
|
||||
.map(|&(path, content)| parser::Ast::new(path, content.to_owned(), ctx, &mut loader))
|
||||
.collect(),
|
||||
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
|
||||
)
|
||||
|
|
|
@@ -7,7 +7,7 @@ use {
alloc::{boxed::Box, string::String, vec::Vec},
core::{
alloc::Layout,
cell::UnsafeCell,
cell::{RefCell, UnsafeCell},
fmt::{self},
intrinsics::unlikely,
marker::PhantomData,

@@ -19,7 +19,6 @@ use {
pub type Pos = u32;
pub type IdentFlags = u32;
pub type Symbols = Vec<Symbol>;
pub type FileId = u32;
pub type IdentIndex = u16;
pub type LoaderError = String;

@@ -31,6 +30,20 @@ pub enum FileKind {
Embed,
}

trait Trans {
fn trans(self) -> Self;
}

impl<T> Trans for Option<Option<T>> {
fn trans(self) -> Self {
match self {
Some(None) => None,
Some(Some(v)) => Some(Some(v)),
None => Some(None),
}
}
}

pub const SOURCE_TO_AST_FACTOR: usize = 7 * (core::mem::size_of::<usize>() / 4) + 1;

pub mod idfl {
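`Trans` exists because optional sub-expressions are now parsed as `.then(|| self.ptr_expr())`, which yields `Option<Option<_>>`: the outer layer records whether the branch was present, the inner one whether parsing it succeeded. Swapping the layers lets `?` propagate a parse failure while an absent branch still counts as success. The same swap as a standalone sketch:

fn trans<T>(v: Option<Option<T>>) -> Option<Option<T>> {
    match v {
        Some(Some(v)) => Some(Some(v)), // branch present and parsed fine
        Some(None) => None,             // branch present but failed -> propagate the failure
        None => Some(None),             // branch absent -> still a successful parse
    }
}

// Stand-in for `self.advance_if(T::Else).then(|| self.ptr_expr()).trans()?`,
// with the inner Option modelling the fallible sub-parse.
fn parse_else(has_else: bool, sub_parse_ok: bool) -> Option<Option<&'static str>> {
    trans(has_else.then(|| sub_parse_ok.then_some("else body")))
}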
@@ -73,7 +86,7 @@ pub struct Parser<'a, 'b> {
loader: Loader<'b>,
lexer: Lexer<'a>,
arena: &'a Arena,
ctx: &'b mut ParserCtx,
ctx: &'b mut Ctx,
token: Token,
ns_bound: usize,
trailing_sep: bool,

@@ -82,7 +95,7 @@ pub struct Parser<'a, 'b> {
impl<'a, 'b> Parser<'a, 'b> {
pub fn parse(
ctx: &'b mut ParserCtx,
ctx: &'b mut Ctx,
input: &'a str,
path: &'b str,
loader: Loader<'b>,
@@ -110,23 +123,17 @@ impl<'a, 'b> Parser<'a, 'b> {
if !self.ctx.idents.is_empty() {
// TODO: we need error recovery
log::error!("{}", {
let mut errors = String::new();
for id in self.ctx.idents.drain(..) {
report_to(
self.lexer.source(),
self.path,
ident::pos(id.ident),
&format_args!(
"undeclared identifier: {}",
self.lexer.slice(ident::range(id.ident))
),
&mut errors,
);
}
errors
});
unreachable!();
let mut idents = core::mem::take(&mut self.ctx.idents);
for id in idents.drain(..) {
self.report(
ident::pos(id.ident),
format_args!(
"undeclared identifier: {}",
self.lexer.slice(ident::range(id.ident))
),
);
}
self.ctx.idents = idents;
}

f
@@ -136,20 +143,20 @@ impl<'a, 'b> Parser<'a, 'b> {
core::mem::replace(&mut self.token, self.lexer.eat())
}

fn ptr_expr(&mut self) -> &'a Expr<'a> {
self.arena.alloc(self.expr())
fn ptr_expr(&mut self) -> Option<&'a Expr<'a>> {
Some(self.arena.alloc(self.expr()?))
}

fn expr_low(&mut self, top_level: bool) -> Expr<'a> {
let left = self.unit_expr();
fn expr_low(&mut self, top_level: bool) -> Option<Expr<'a>> {
let left = self.unit_expr()?;
self.bin_expr(left, 0, top_level)
}

fn expr(&mut self) -> Expr<'a> {
fn expr(&mut self) -> Option<Expr<'a>> {
self.expr_low(false)
}

fn bin_expr(&mut self, mut fold: Expr<'a>, min_prec: u8, top_level: bool) -> Expr<'a> {
fn bin_expr(&mut self, mut fold: Expr<'a>, min_prec: u8, top_level: bool) -> Option<Expr<'a>> {
loop {
let Some(prec) = self.token.kind.precedence() else {
break;
@ -165,8 +172,8 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
self.declare_rec(&fold, top_level);
|
||||
}
|
||||
|
||||
let right = self.unit_expr();
|
||||
let right = self.bin_expr(right, prec, false);
|
||||
let right = self.unit_expr()?;
|
||||
let right = self.bin_expr(right, prec, false)?;
|
||||
let right = self.arena.alloc(right);
|
||||
let left = self.arena.alloc(fold);
|
||||
|
||||
|
@ -187,7 +194,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
}
|
||||
}
|
||||
|
||||
fold
|
||||
Some(fold)
|
||||
}
|
||||
|
||||
fn declare_rec(&mut self, expr: &Expr, top_level: bool) {
|
||||
|
@ -200,7 +207,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
self.declare_rec(value, top_level)
|
||||
}
|
||||
}
|
||||
_ => self.report(expr.pos(), "cant declare this shit (yet)"),
|
||||
_ => _ = self.report(expr.pos(), "cant declare this shit (yet)"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -217,12 +224,14 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
|
||||
let Ok(index) = self.ctx.idents.binary_search_by_key(&id, |s| s.ident) else {
|
||||
self.report(pos, "the identifier is rezerved for a builtin (proably)");
|
||||
return;
|
||||
};
|
||||
if core::mem::replace(&mut self.ctx.idents[index].declared, true) {
|
||||
self.report(
|
||||
pos,
|
||||
format_args!("redeclaration of identifier: {}", self.lexer.slice(ident::range(id))),
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
self.ctx.idents[index].ordered = ordered;
|
||||
|
@ -245,11 +254,16 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
{
|
||||
Some((i, elem)) => (i, elem, false),
|
||||
None => {
|
||||
let Some(id) = ident::new(token.start, name.len() as _) else {
|
||||
self.report(token.start, "identifier can at most have 64 characters");
|
||||
let ident = match ident::new(token.start, name.len() as _) {
|
||||
None => {
|
||||
self.report(token.start, "identifier can at most have 64 characters");
|
||||
ident::new(token.start, 64).unwrap()
|
||||
}
|
||||
Some(id) => id,
|
||||
};
|
||||
|
||||
self.ctx.idents.push(ScopeIdent {
|
||||
ident: id,
|
||||
ident,
|
||||
declared: false,
|
||||
ordered: false,
|
||||
flags: 0,
|
||||
|
@ -271,18 +285,18 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
self.lexer.slice(range.range())
|
||||
}
|
||||
|
||||
fn unit_expr(&mut self) -> Expr<'a> {
|
||||
fn unit_expr(&mut self) -> Option<Expr<'a>> {
|
||||
use {Expr as E, TokenKind as T};
|
||||
let frame = self.ctx.idents.len();
|
||||
let token @ Token { start: pos, .. } = self.next();
|
||||
let prev_boundary = self.ns_bound;
|
||||
let prev_captured = self.ctx.captured.len();
|
||||
let mut expr = match token.kind {
|
||||
T::Ct => E::Ct { pos, value: self.ptr_expr() },
|
||||
T::Ct => E::Ct { pos, value: self.ptr_expr()? },
|
||||
T::Directive if self.lexer.slice(token.range()) == "use" => {
|
||||
self.expect_advance(TokenKind::LParen);
|
||||
let str = self.expect_advance(TokenKind::DQuote);
|
||||
self.expect_advance(TokenKind::RParen);
|
||||
self.expect_advance(TokenKind::LParen)?;
|
||||
let str = self.expect_advance(TokenKind::DQuote)?;
|
||||
self.expect_advance(TokenKind::RParen)?;
|
||||
let path = self.lexer.slice(str.range());
|
||||
let path = &path[1..path.len() - 1];
|
||||
|
||||
|
@ -292,15 +306,15 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
id: match (self.loader)(path, self.path, FileKind::Module) {
|
||||
Ok(id) => id,
|
||||
Err(e) => {
|
||||
self.report(str.start, format_args!("error loading dependency: {e:#}"))
|
||||
self.report(str.start, format_args!("error loading dependency: {e:#}"))?
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
T::Directive if self.lexer.slice(token.range()) == "embed" => {
|
||||
self.expect_advance(TokenKind::LParen);
|
||||
let str = self.expect_advance(TokenKind::DQuote);
|
||||
self.expect_advance(TokenKind::RParen);
|
||||
self.expect_advance(TokenKind::LParen)?;
|
||||
let str = self.expect_advance(TokenKind::DQuote)?;
|
||||
self.expect_advance(TokenKind::RParen)?;
|
||||
let path = self.lexer.slice(str.range());
|
||||
let path = &path[1..path.len() - 1];
|
||||
|
||||
|
@ -309,8 +323,10 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
path,
|
||||
id: match (self.loader)(path, self.path, FileKind::Embed) {
|
||||
Ok(id) => id,
|
||||
Err(e) => self
|
||||
.report(str.start, format_args!("error loading embedded file: {e:#}")),
|
||||
Err(e) => self.report(
|
||||
str.start,
|
||||
format_args!("error loading embedded file: {e:#}"),
|
||||
)?,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -318,7 +334,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
pos: pos - 1, // need to undo the directive shift
|
||||
name: self.tok_str(token),
|
||||
args: {
|
||||
self.expect_advance(T::LParen);
|
||||
self.expect_advance(T::LParen)?;
|
||||
self.collect_list(T::Comma, T::RParen, Self::expr)
|
||||
},
|
||||
},
|
||||
|
@ -328,7 +344,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
T::DQuote => E::String { pos, literal: self.tok_str(token) },
|
||||
T::Packed => {
|
||||
self.packed = true;
|
||||
let expr = self.unit_expr();
|
||||
let expr = self.unit_expr()?;
|
||||
if self.packed {
|
||||
self.report(
|
||||
expr.pos(),
|
||||
|
@ -342,20 +358,20 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
packed: core::mem::take(&mut self.packed),
|
||||
fields: {
|
||||
self.ns_bound = self.ctx.idents.len();
|
||||
self.expect_advance(T::LBrace);
|
||||
self.expect_advance(T::LBrace)?;
|
||||
self.collect_list(T::Comma, T::RBrace, |s| {
|
||||
let tok = s.token;
|
||||
if s.advance_if(T::Comment) {
|
||||
Some(if s.advance_if(T::Comment) {
|
||||
CommentOr::Comment { literal: s.tok_str(tok), pos: tok.start }
|
||||
} else {
|
||||
let name = s.expect_advance(T::Ident);
|
||||
s.expect_advance(T::Colon);
|
||||
let name = s.expect_advance(T::Ident)?;
|
||||
s.expect_advance(T::Colon)?;
|
||||
CommentOr::Or(StructField {
|
||||
pos: name.start,
|
||||
name: s.tok_str(name),
|
||||
ty: s.expr(),
|
||||
ty: s.expr()?,
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
},
|
||||
captured: {
|
||||
|
@ -381,11 +397,11 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
}
|
||||
T::If => E::If {
|
||||
pos,
|
||||
cond: self.ptr_expr(),
|
||||
then: self.ptr_expr(),
|
||||
else_: self.advance_if(T::Else).then(|| self.ptr_expr()),
|
||||
cond: self.ptr_expr()?,
|
||||
then: self.ptr_expr()?,
|
||||
else_: self.advance_if(T::Else).then(|| self.ptr_expr()).trans()?,
|
||||
},
|
||||
T::Loop => E::Loop { pos, body: self.ptr_expr() },
|
||||
T::Loop => E::Loop { pos, body: self.ptr_expr()? },
|
||||
T::Break => E::Break { pos },
|
||||
T::Continue => E::Continue { pos },
|
||||
T::Return => E::Return {
|
||||
|
@ -394,39 +410,40 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
self.token.kind,
|
||||
T::Semi | T::RBrace | T::RBrack | T::RParen | T::Comma
|
||||
))
|
||||
.then(|| self.ptr_expr()),
|
||||
.then(|| self.ptr_expr())
|
||||
.trans()?,
|
||||
},
|
||||
T::Fn => E::Closure {
|
||||
pos,
|
||||
args: {
|
||||
self.expect_advance(T::LParen);
|
||||
self.expect_advance(T::LParen)?;
|
||||
self.collect_list(T::Comma, T::RParen, |s| {
|
||||
let name = s.advance_ident();
|
||||
let name = s.advance_ident()?;
|
||||
let (id, _) = s.resolve_ident(name);
|
||||
s.declare(name.start, id, true, true);
|
||||
s.expect_advance(T::Colon);
|
||||
Arg {
|
||||
s.expect_advance(T::Colon)?;
|
||||
Some(Arg {
|
||||
pos: name.start,
|
||||
name: s.tok_str(name),
|
||||
is_ct: name.kind == T::CtIdent,
|
||||
id,
|
||||
ty: s.expr(),
|
||||
}
|
||||
ty: s.expr()?,
|
||||
})
|
||||
})
|
||||
},
|
||||
ret: {
|
||||
self.expect_advance(T::Colon);
|
||||
self.ptr_expr()
|
||||
self.expect_advance(T::Colon)?;
|
||||
self.ptr_expr()?
|
||||
},
|
||||
body: self.ptr_expr(),
|
||||
body: self.ptr_expr()?,
|
||||
},
|
||||
T::Ctor => self.ctor(pos, None),
|
||||
T::Tupl => self.tupl(pos, None),
|
||||
T::LBrack => E::Slice {
|
||||
item: self.ptr_unit_expr(),
|
||||
size: self.advance_if(T::Semi).then(|| self.ptr_expr()),
|
||||
item: self.ptr_unit_expr()?,
|
||||
size: self.advance_if(T::Semi).then(|| self.ptr_expr()).trans()?,
|
||||
pos: {
|
||||
self.expect_advance(T::RBrack);
|
||||
self.expect_advance(T::RBrack)?;
|
||||
pos
|
||||
},
|
||||
},
|
||||
|
@ -434,7 +451,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
pos,
|
||||
op: token.kind,
|
||||
val: {
|
||||
let expr = self.ptr_unit_expr();
|
||||
let expr = self.ptr_unit_expr()?;
|
||||
if token.kind == T::Band {
|
||||
self.flag_idents(*expr, idfl::REFERENCED);
|
||||
}
|
||||
|
@ -454,18 +471,18 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
pos,
|
||||
value: match u64::from_str_radix(slice, radix as u32) {
|
||||
Ok(value) => value,
|
||||
Err(e) => self.report(token.start, format_args!("invalid number: {e}")),
|
||||
Err(e) => self.report(token.start, format_args!("invalid number: {e}"))?,
|
||||
} as i64,
|
||||
radix,
|
||||
}
|
||||
}
|
||||
T::LParen => {
|
||||
let expr = self.expr();
|
||||
self.expect_advance(T::RParen);
|
||||
let expr = self.expr()?;
|
||||
self.expect_advance(T::RParen)?;
|
||||
expr
|
||||
}
|
||||
T::Comment => Expr::Comment { pos, literal: self.tok_str(token) },
|
||||
tok => self.report(token.start, format_args!("unexpected token: {tok}")),
|
||||
tok => self.report(token.start, format_args!("unexpected token: {tok}"))?,
|
||||
};
|
||||
|
||||
loop {
|
||||
|
@ -485,8 +502,8 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
T::LBrack => E::Index {
|
||||
base: self.arena.alloc(expr),
|
||||
index: {
|
||||
let index = self.expr();
|
||||
self.expect_advance(T::RBrack);
|
||||
let index = self.expr()?;
|
||||
self.expect_advance(T::RBrack)?;
|
||||
self.arena.alloc(index)
|
||||
},
|
||||
},
|
||||
|
@ -494,7 +511,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
target: self.arena.alloc(expr),
|
||||
pos: token.start,
|
||||
name: {
|
||||
let token = self.expect_advance(T::Ident);
|
||||
let token = self.expect_advance(T::Ident)?;
|
||||
self.tok_str(token)
|
||||
},
|
||||
},
|
||||
|
@ -506,7 +523,7 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
self.pop_scope(frame);
|
||||
}
|
||||
|
||||
expr
|
||||
Some(expr)
|
||||
}
|
||||
|
||||
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>) -> Expr<'a> {
|
||||
|
@ -523,31 +540,29 @@ impl<'a, 'b> Parser<'a, 'b> {
|
|||
pos,
|
||||
ty: ty.map(|ty| self.arena.alloc(ty)),
|
||||
fields: self.collect_list(TokenKind::Comma, TokenKind::RBrace, |s| {
|
||||
let name_tok = s.advance_ident();
|
||||
let name_tok = s.advance_ident()?;
|
||||
let name = s.tok_str(name_tok);
|
||||
CtorField {
|
||||
Some(CtorField {
|
||||
pos: name_tok.start,
|
||||
name,
|
||||
value: if s.advance_if(TokenKind::Colon) {
|
||||
s.expr()
|
||||
s.expr()?
|
||||
} else {
|
||||
let (id, is_first) = s.resolve_ident(name_tok);
|
||||
Expr::Ident { pos: name_tok.start, is_ct: false, id, is_first }
|
||||
},
|
||||
}
|
||||
})
|
||||
}),
|
||||
trailing_comma: core::mem::take(&mut self.trailing_sep),
|
||||
}
|
||||
}
|
||||
|
||||
fn advance_ident(&mut self) -> Token {
|
||||
if matches!(self.token.kind, TokenKind::Ident | TokenKind::CtIdent) {
|
||||
self.next()
|
||||
fn advance_ident(&mut self) -> Option<Token> {
|
||||
let next = self.next();
|
||||
if matches!(next.kind, TokenKind::Ident | TokenKind::CtIdent) {
|
||||
Some(next)
|
||||
} else {
|
||||
self.report(
|
||||
self.token.start,
|
||||
format_args!("expected identifier, found {}", self.token.kind),
|
||||
)
|
||||
self.report(self.token.start, format_args!("expected identifier, found {}", next.kind))?
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -567,20 +582,49 @@ impl<'a, 'b> Parser<'a, 'b> {
.collect_into(&mut self.ctx.symbols);
}

fn ptr_unit_expr(&mut self) -> &'a Expr<'a> {
self.arena.alloc(self.unit_expr())
fn ptr_unit_expr(&mut self) -> Option<&'a Expr<'a>> {
Some(self.arena.alloc(self.unit_expr()?))
}

fn collect_list<T: Copy>(
&mut self,
delim: TokenKind,
end: TokenKind,
mut f: impl FnMut(&mut Self) -> T,
mut f: impl FnMut(&mut Self) -> Option<T>,
) -> &'a [T] {
let mut trailing_sep = false;
let mut view = self.ctx.stack.view();
while !self.advance_if(end) {
let val = f(self);
'o: while !self.advance_if(end) {
let val = match f(self) {
Some(val) => val,
None => {
let mut paren = None::<TokenKind>;
let mut depth = 0;
loop {
let tok = self.next();
if tok.kind == TokenKind::Eof {
break 'o;
}
if let Some(par) = paren {
if par == tok.kind {
depth += 1;
} else if tok.kind.closing() == par.closing() {
depth -= 1;
if depth == 0 {
paren = None;
}
}
} else if tok.kind == delim {
continue 'o;
} else if tok.kind == end {
break 'o;
} else if tok.kind.closing().is_some() && paren.is_none() {
paren = Some(tok.kind);
depth = 1;
}
}
}
};
trailing_sep = self.advance_if(delim);
unsafe { self.ctx.stack.push(&mut view, val) };
}
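The recovery arm above is the heart of this commit: when a list element fails to parse, the parser no longer aborts; it skips tokens until the next delimiter or the list terminator, keeping a bracket depth so delimiters nested inside `()`, `[]` or `{}` are ignored. A simplified, self-contained version of that skip loop (it pairs each opener with its closer directly instead of comparing `TokenKind::closing` results):

#[derive(Clone, Copy, PartialEq)]
enum Tok { LParen, RParen, LBrace, RBrace, Comma, Eof, Other }

fn closer(t: Tok) -> Option<Tok> {
    match t {
        Tok::LParen => Some(Tok::RParen),
        Tok::LBrace => Some(Tok::RBrace),
        _ => None,
    }
}

// Returns true if another element may follow (we stopped on `delim`),
// false if the list or the input ended while skipping.
fn skip_failed_element(mut next: impl FnMut() -> Tok, delim: Tok, end: Tok) -> bool {
    let mut open: Option<Tok> = None; // bracket we are currently inside of
    let mut depth = 0usize;
    loop {
        let tok = next();
        if tok == Tok::Eof {
            return false;
        }
        if let Some(par) = open {
            if tok == par {
                depth += 1; // another opener of the same kind
            } else if Some(tok) == closer(par) {
                depth -= 1;
                if depth == 0 {
                    open = None; // back at list level
                }
            }
        } else if tok == delim {
            return true;
        } else if tok == end {
            return false;
        } else if closer(tok).is_some() {
            open = Some(tok);
            depth = 1;
        }
    }
}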
@@ -597,20 +641,28 @@ impl<'a, 'b> Parser<'a, 'b> {
}
}

fn expect_advance(&mut self, kind: TokenKind) -> Token {
if self.token.kind != kind {
self.report(
self.token.start,
format_args!("expected {}, found {}", kind, self.token.kind),
);
#[must_use]
fn expect_advance(&mut self, kind: TokenKind) -> Option<Token> {
let next = self.next();
if next.kind != kind {
self.report(next.start, format_args!("expected {}, found {}", kind, next.kind))?
} else {
Some(next)
}
self.next()
}

#[track_caller]
fn report(&self, pos: Pos, msg: impl fmt::Display) -> ! {
log::error!("{}", Report::new(self.lexer.source(), self.path, pos, msg));
unreachable!();
fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
if log::log_enabled!(log::Level::Error) {
use core::fmt::Write;
writeln!(
self.ctx.errors.get_mut(),
"{}",
Report::new(self.lexer.source(), self.path, pos, msg)
)
.unwrap();
}
None
}

fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) {
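With the hunk above, `report` stops being fatal: the diagnostic is rendered into the `String` shared through the parser context and the method returns `None`, so every call site can bail out with `?` and the driver decides later what to do with the accumulated text. A stable-Rust sketch of the same contract (the real method returns `Option<!>`, so its `?` diverges on its own):

use std::{cell::RefCell, fmt::Write};

struct Ctx {
    errors: RefCell<String>, // later handed to / shared with the code generator
}

impl Ctx {
    // Append one rendered diagnostic and signal failure to the caller.
    fn report(&self, pos: u32, msg: impl std::fmt::Display) -> Option<()> {
        writeln!(self.errors.borrow_mut(), "error at {pos}: {msg}").unwrap();
        None
    }
}

fn expect_number(ctx: &Ctx, tok: &str, pos: u32) -> Option<i64> {
    match tok.parse() {
        Ok(v) => Some(v),
        Err(e) => {
            ctx.report(pos, format_args!("invalid number: {e}"))?;
            unreachable!() // report always returns None, so `?` has already returned
        }
    }
}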
@@ -988,13 +1040,25 @@ impl core::fmt::Display for Display<'_> {
}

#[derive(Default)]
pub struct ParserCtx {
symbols: Symbols,
pub struct Ctx {
pub errors: RefCell<String>,
symbols: Vec<Symbol>,
stack: StackAlloc,
idents: Vec<ScopeIdent>,
captured: Vec<Ident>,
}

impl Ctx {
pub fn clear(&mut self) {
self.errors.get_mut().clear();

debug_assert_eq!(self.symbols.len(), 0);
debug_assert_eq!(self.stack.len, 0);
debug_assert_eq!(self.idents.len(), 0);
debug_assert_eq!(self.captured.len(), 0);
}
}

#[repr(C)]
pub struct AstInner<T: ?Sized> {
ref_count: AtomicUsize,
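Usage-wise, the renamed `Ctx` is meant to be created once, threaded through every parse, and inspected (or handed to the code generator) afterwards; `clear()` exists so callers like the fuzz loop added later in this diff can reuse the allocations. A small sketch against the API as modified by this commit; `parse_all` and its in-memory file list are made up for illustration:

fn parse_all(sources: &[(&str, &str)]) -> Result<Vec<hblang::parser::Ast>, String> {
    let mut ctx = hblang::parser::Ctx::default();
    let asts = sources
        .iter()
        .map(|&(path, src)| {
            hblang::parser::Ast::new(path, src, &mut ctx, &mut hblang::parser::no_loader)
        })
        .collect();
    let errors = core::mem::take(ctx.errors.get_mut());
    if errors.is_empty() { Ok(asts) } else { Err(errors) }
}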
@ -1014,12 +1078,12 @@ impl AstInner<[Symbol]> {
|
|||
.0
|
||||
}
|
||||
|
||||
fn new(file: Box<str>, path: &str, ctx: &mut ParserCtx, loader: Loader) -> NonNull<Self> {
|
||||
fn new(file: Box<str>, path: Box<str>, ctx: &mut Ctx, loader: Loader) -> NonNull<Self> {
|
||||
let arena = Arena::with_capacity(
|
||||
SOURCE_TO_AST_FACTOR * file.bytes().filter(|b| !b.is_ascii_whitespace()).count(),
|
||||
);
|
||||
let exprs =
|
||||
unsafe { core::mem::transmute(Parser::parse(ctx, &file, path, loader, &arena)) };
|
||||
unsafe { core::mem::transmute(Parser::parse(ctx, &file, &path, loader, &arena)) };
|
||||
|
||||
crate::quad_sort(&mut ctx.symbols, |a, b| a.name.cmp(&b.name));
|
||||
|
||||
|
@ -1033,13 +1097,14 @@ impl AstInner<[Symbol]> {
|
|||
ref_count: AtomicUsize::new(1),
|
||||
mem: arena.chunk.into_inner(),
|
||||
exprs,
|
||||
path: path.into(),
|
||||
path,
|
||||
file,
|
||||
symbols: (),
|
||||
});
|
||||
core::ptr::addr_of_mut!((*inner).symbols)
|
||||
.as_mut_ptr()
|
||||
.copy_from_nonoverlapping(ctx.symbols.as_ptr(), ctx.symbols.len());
|
||||
ctx.symbols.clear();
|
||||
|
||||
NonNull::new_unchecked(inner)
|
||||
}
|
||||
|
@ -1090,8 +1155,13 @@ fn report_to(file: &str, path: &str, pos: Pos, msg: &dyn fmt::Display, out: &mut
|
|||
pub struct Ast(NonNull<AstInner<[Symbol]>>);
|
||||
|
||||
impl Ast {
|
||||
pub fn new(path: &str, content: String, ctx: &mut ParserCtx, loader: Loader) -> Self {
|
||||
Self(AstInner::new(content.into(), path, ctx, loader))
|
||||
pub fn new(
|
||||
path: impl Into<Box<str>>,
|
||||
content: impl Into<Box<str>>,
|
||||
ctx: &mut Ctx,
|
||||
loader: Loader,
|
||||
) -> Self {
|
||||
Self(AstInner::new(content.into(), path.into(), ctx, loader))
|
||||
}
|
||||
|
||||
pub fn exprs(&self) -> &[Expr] {
|
||||
|
@ -1118,7 +1188,7 @@ impl Ast {
|
|||
|
||||
impl Default for Ast {
|
||||
fn default() -> Self {
|
||||
Self(AstInner::new("".into(), "", &mut ParserCtx::default(), &mut no_loader))
|
||||
Self(AstInner::new("".into(), "".into(), &mut Ctx::default(), &mut no_loader))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1327,6 +1397,15 @@ impl Arena {
|
|||
|
||||
chunk.alloc(layout).unwrap()
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
let size = self.chunk.get_mut().size();
|
||||
if self.chunk.get_mut().next().is_some() {
|
||||
self.chunk = ArenaChunk::new(size + 1024, Default::default()).into();
|
||||
} else {
|
||||
self.chunk.get_mut().reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ArenaChunk {
|
||||
|
@ -1385,6 +1464,10 @@ impl ArenaChunk {
|
|||
pub fn size(&self) -> usize {
|
||||
self.base as usize + self.size - self.end as usize + self.next().map_or(0, Self::size)
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.end = unsafe { self.base.add(self.size) };
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for ArenaChunk {
|
||||
|
|
lang/src/son.rs — 401 lines changed
|
@ -14,8 +14,8 @@ use {
|
|||
reg, task,
|
||||
ty::{self, Arg, ArrayLen, Loc, Tuple},
|
||||
vc::{BitSet, Vc},
|
||||
Comptime, Func, Global, HashMap, Offset, OffsetIter, PLoc, Reloc, Sig, SymKey, TypeParser,
|
||||
TypedReloc, Types,
|
||||
Comptime, FTask, Func, Global, HashMap, Offset, OffsetIter, PLoc, Reloc, Sig, StringRef,
|
||||
SymKey, TypeParser, TypedReloc, Types,
|
||||
},
|
||||
alloc::{borrow::ToOwned, string::String, vec::Vec},
|
||||
core::{
|
||||
|
@ -1707,11 +1707,6 @@ fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
|
|||
doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
|
||||
}
|
||||
|
||||
struct FTask {
|
||||
file: FileId,
|
||||
id: ty::Func,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
struct Ctx {
|
||||
ty: Option<ty::Id>,
|
||||
|
@ -1727,6 +1722,9 @@ impl Ctx {
|
|||
struct Pool {
|
||||
cis: Vec<ItemCtx>,
|
||||
used_cis: usize,
|
||||
|
||||
#[expect(dead_code)]
|
||||
ralloc: Regalloc,
|
||||
}
|
||||
|
||||
impl Pool {
|
||||
|
@ -1766,6 +1764,10 @@ impl Pool {
|
|||
dst.scope.clear(&mut dst.nodes);
|
||||
*dst = core::mem::take(&mut self.cis[self.used_cis]);
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
debug_assert_eq!(self.used_cis, 0);
|
||||
}
|
||||
}
|
||||
|
||||
struct Regalloc {
|
||||
|
@ -1827,109 +1829,49 @@ impl Value {
|
|||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Codegen<'a> {
|
||||
pub files: &'a [parser::Ast],
|
||||
pub errors: RefCell<String>,
|
||||
|
||||
tasks: Vec<Option<FTask>>,
|
||||
pub struct CodegenCtx {
|
||||
pub parser: parser::Ctx,
|
||||
tys: Types,
|
||||
ci: ItemCtx,
|
||||
pool: Pool,
|
||||
#[expect(dead_code)]
|
||||
ralloc: Regalloc,
|
||||
ct: Comptime,
|
||||
}
|
||||
|
||||
impl TypeParser for Codegen<'_> {
|
||||
fn tys(&mut self) -> &mut Types {
|
||||
&mut self.tys
|
||||
}
|
||||
|
||||
fn eval_const(&mut self, file: FileId, expr: &Expr, ret: ty::Id) -> u64 {
|
||||
let mut scope = core::mem::take(&mut self.ci.scope.vars);
|
||||
self.pool.push_ci(file, Some(ret), self.tasks.len(), &mut self.ci);
|
||||
self.ci.scope.vars = scope;
|
||||
|
||||
let prev_err_len = self.errors.borrow().len();
|
||||
|
||||
self.expr(&Expr::Return { pos: expr.pos(), val: Some(expr) });
|
||||
|
||||
scope = core::mem::take(&mut self.ci.scope.vars);
|
||||
self.ci.finalize();
|
||||
|
||||
let res = if self.errors.borrow().len() == prev_err_len {
|
||||
self.emit_and_eval(file, ret, &mut [])
|
||||
} else {
|
||||
1
|
||||
};
|
||||
|
||||
self.pool.pop_ci(&mut self.ci);
|
||||
self.ci.scope.vars = scope;
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
fn infer_type(&mut self, expr: &Expr) -> ty::Id {
|
||||
self.pool.save_ci(&self.ci);
|
||||
let ty = self.expr(expr).map_or(ty::Id::NEVER, |v| v.ty);
|
||||
self.pool.restore_ci(&mut self.ci);
|
||||
ty
|
||||
}
|
||||
|
||||
fn on_reuse(&mut self, existing: ty::Id) {
|
||||
if let ty::Kind::Func(id) = existing.expand()
|
||||
&& let func = &mut self.tys.ins.funcs[id as usize]
|
||||
&& let Err(idx) = task::unpack(func.offset)
|
||||
&& idx < self.tasks.len()
|
||||
{
|
||||
func.offset = task::id(self.tasks.len());
|
||||
let task = self.tasks[idx].take();
|
||||
self.tasks.push(task);
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_global(&mut self, file: FileId, name: Ident, expr: &Expr) -> ty::Id {
|
||||
let gid = self.tys.ins.globals.len() as ty::Global;
|
||||
self.tys.ins.globals.push(Global { file, name, ..Default::default() });
|
||||
|
||||
let ty = ty::Kind::Global(gid);
|
||||
self.pool.push_ci(file, None, self.tasks.len(), &mut self.ci);
|
||||
let prev_err_len = self.errors.borrow().len();
|
||||
|
||||
self.expr(&(Expr::Return { pos: expr.pos(), val: Some(expr) }));
|
||||
|
||||
self.ci.finalize();
|
||||
|
||||
let ret = self.ci.ret.expect("for return type to be infered");
|
||||
if self.errors.borrow().len() == prev_err_len {
|
||||
let mut mem = vec![0u8; self.tys.size_of(ret) as usize];
|
||||
self.emit_and_eval(file, ret, &mut mem);
|
||||
self.tys.ins.globals[gid as usize].data = mem;
|
||||
}
|
||||
|
||||
self.pool.pop_ci(&mut self.ci);
|
||||
self.tys.ins.globals[gid as usize].ty = ret;
|
||||
|
||||
ty.compress()
|
||||
}
|
||||
|
||||
fn report(&self, pos: Pos, msg: impl Display) -> ty::Id {
|
||||
self.report(pos, msg);
|
||||
ty::Id::NEVER
|
||||
}
|
||||
|
||||
fn find_local_ty(&mut self, ident: Ident) -> Option<ty::Id> {
|
||||
self.ci.scope.vars.iter().rfind(|v| (v.id == ident && v.value() == NEVER)).map(|v| v.ty)
|
||||
impl CodegenCtx {
|
||||
pub fn clear(&mut self) {
|
||||
self.parser.clear();
|
||||
self.tys.clear();
|
||||
self.pool.clear();
|
||||
self.ct.clear();
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Codegen<'a> {
|
||||
pub files: &'a [parser::Ast],
|
||||
pub errors: &'a RefCell<String>,
|
||||
tys: &'a mut Types,
|
||||
ci: ItemCtx,
|
||||
pool: &'a mut Pool,
|
||||
ct: &'a mut Comptime,
|
||||
}
|
||||
|
||||
impl<'a> Codegen<'a> {
|
||||
pub fn new(files: &'a [parser::Ast], ctx: &'a mut CodegenCtx) -> Self {
|
||||
Self {
|
||||
files,
|
||||
errors: &ctx.parser.errors,
|
||||
tys: &mut ctx.tys,
|
||||
ci: Default::default(),
|
||||
pool: &mut ctx.pool,
|
||||
ct: &mut ctx.ct,
|
||||
}
|
||||
}
|
||||
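The restructuring in this hunk is the other half of the commit: the old monolithic `Codegen` (which owned `tasks`, `errors`, `Types`, the pool and the comptime VM) becomes a long-lived `CodegenCtx` owning the buffers plus a short-lived `Codegen<'a>` view that borrows disjoint pieces of it, so drivers and the fuzz loop can `clear()` the context and rebuild the view without reallocating. The borrow-splitting pattern in isolation, with placeholder fields:

// Placeholder fields; the real ones are parser::Ctx, Types, Pool and Comptime.
#[derive(Default)]
struct OwnedCtx {
    errors: std::cell::RefCell<String>,
    types: Vec<u32>,
    pool: Vec<u8>,
}

struct View<'a> {
    errors: &'a std::cell::RefCell<String>,
    types: &'a mut Vec<u32>,
    pool: &'a mut Vec<u8>,
}

impl<'a> View<'a> {
    fn new(ctx: &'a mut OwnedCtx) -> Self {
        // Field-by-field borrows are disjoint, so one shared and two mutable
        // references can be handed out from the same &mut context.
        Self { errors: &ctx.errors, types: &mut ctx.types, pool: &mut ctx.pool }
    }
}

fn run_many(ctx: &mut OwnedCtx) {
    for _ in 0..3 {
        ctx.errors.get_mut().clear(); // reuse between runs, like CodegenCtx::clear
        let view = View::new(ctx);
        view.types.push(1); // per-run work goes through the view
    }
}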
|
||||
fn emit_and_eval(&mut self, file: FileId, ret: ty::Id, ret_loc: &mut [u8]) -> u64 {
|
||||
if !self.complete_call_graph() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
self.ci.emit_body(&mut self.tys, self.files, Sig { args: Tuple::empty(), ret });
|
||||
self.ci.emit_body(self.tys, self.files, Sig { args: Tuple::empty(), ret });
|
||||
self.ci.code.truncate(self.ci.code.len() - instrs::jala(0, 0, 0).0);
|
||||
self.ci.emit(instrs::tx());
|
||||
|
||||
|
@ -2006,12 +1948,12 @@ impl<'a> Codegen<'a> {
|
|||
debug_assert_ne!(region, VOID);
|
||||
debug_assert_ne!({ self.ci.nodes[region].ty }, ty::Id::VOID, "{:?}", {
|
||||
self.ci.nodes[region].lock_rc = Nid::MAX;
|
||||
self.ci.nodes.graphviz_in_browser(&self.tys, self.files);
|
||||
self.ci.nodes.graphviz_in_browser(self.tys, self.files);
|
||||
});
|
||||
debug_assert!(
|
||||
self.ci.nodes[region].kind != Kind::Load || self.ci.nodes[region].ty.is_pointer(),
|
||||
"{:?} {} {}",
|
||||
self.ci.nodes.graphviz_in_browser(&self.tys, self.files),
|
||||
self.ci.nodes.graphviz_in_browser(self.tys, self.files),
|
||||
self.cfile().path,
|
||||
self.ty_display(self.ci.nodes[region].ty)
|
||||
);
|
||||
|
@ -2043,8 +1985,8 @@ impl<'a> Codegen<'a> {
|
|||
fn make_func_reachable(&mut self, func: ty::Func) {
|
||||
let fuc = &mut self.tys.ins.funcs[func as usize];
|
||||
if fuc.offset == u32::MAX {
|
||||
fuc.offset = task::id(self.tasks.len() as _);
|
||||
self.tasks.push(Some(FTask { file: fuc.file, id: func }));
|
||||
fuc.offset = task::id(self.tys.tasks.len() as _);
|
||||
self.tys.tasks.push(Some(FTask { file: fuc.file, id: func }));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2114,12 +2056,18 @@ impl<'a> Codegen<'a> {
|
|||
crate::endoce_string(literal, &mut data, report).unwrap();
|
||||
|
||||
let ty = self.tys.make_ptr(ty::Id::U8);
|
||||
let global = match self.tys.ins.strings.entry(data.clone()) {
|
||||
hash_map::Entry::Occupied(occupied_entry) => *occupied_entry.get(),
|
||||
hash_map::Entry::Vacant(vacant_entry) => {
|
||||
let global = match self.tys.strings.entry(&data, &self.tys.ins.globals) {
|
||||
(hash_map::RawEntryMut::Occupied(occupied_entry), _) => {
|
||||
occupied_entry.get_key_value().0.value.0
|
||||
}
|
||||
(hash_map::RawEntryMut::Vacant(vacant_entry), hash) => {
|
||||
let global = self.tys.ins.globals.len() as ty::Global;
|
||||
self.tys.ins.globals.push(Global { data, ty, ..Default::default() });
|
||||
*vacant_entry.insert(global)
|
||||
vacant_entry
|
||||
.insert(crate::ctx_map::Key { value: StringRef(global), hash }, ())
|
||||
.0
|
||||
.value
|
||||
.0
|
||||
}
|
||||
};
|
||||
let global = self.ci.nodes.new_node(ty, Kind::Global { global }, [VOID]);
|
||||
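Replacing the plain `HashMap<Vec<u8>, ty::Global>` with `tys.strings` keyed through `StringRef` is a de-duplication trick: the interning map no longer stores a second copy of every string literal, it stores only the global's index and resolves the actual bytes through the globals table when probing (that is what the `CtxEntry` impl earlier in this diff provides). The idea in miniature, using only std and a hand-rolled hash bucket instead of the crate's ctx_map:

use std::collections::{hash_map::RandomState, HashMap};
use std::hash::BuildHasher;

#[derive(Default)]
struct StringInterner {
    globals: Vec<Vec<u8>>,           // single owner of the bytes (the "globals" table)
    by_hash: HashMap<u64, Vec<u32>>, // hash -> indices of globals with that hash
    hasher: RandomState,
}

impl StringInterner {
    fn intern(&mut self, data: &[u8]) -> u32 {
        let hash = self.hasher.hash_one(data);
        let globals = &self.globals;
        let bucket = self.by_hash.entry(hash).or_default();
        if let Some(&id) = bucket.iter().find(|&&id| globals[id as usize].as_slice() == data) {
            return id; // already interned; `data` is never copied a second time
        }
        let id = self.globals.len() as u32;
        self.globals.push(data.to_vec());
        bucket.push(id);
        id
    }
}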
|
@ -2204,7 +2152,7 @@ impl<'a> Codegen<'a> {
|
|||
return Value::NEVER;
|
||||
};
|
||||
|
||||
let Some((offset, ty)) = OffsetIter::offset_of(&self.tys, s, name) else {
|
||||
let Some((offset, ty)) = OffsetIter::offset_of(self.tys, s, name) else {
|
||||
let field_list = self
|
||||
.tys
|
||||
.struct_fields(s)
|
||||
|
@ -2267,7 +2215,7 @@ impl<'a> Codegen<'a> {
|
|||
}
|
||||
Expr::BinOp { left, op: TokenKind::Decl, right, .. } => {
|
||||
let mut right = self.expr(right)?;
|
||||
if right.ty.loc(&self.tys) == Loc::Stack {
|
||||
if right.ty.loc(self.tys) == Loc::Stack {
|
||||
let stck = self.ci.nodes.new_node_nop(right.ty, Kind::Stck, [VOID, MEM]);
|
||||
self.store_mem(stck, right.ty, right.id);
|
||||
right.id = stck;
|
||||
|
@ -2424,7 +2372,7 @@ impl<'a> Codegen<'a> {
|
|||
);
|
||||
}
|
||||
|
||||
match ty.loc(&self.tys) {
|
||||
match ty.loc(self.tys) {
|
||||
Loc::Reg if core::mem::take(&mut val.ptr) => val.id = self.load_mem(val.id, ty),
|
||||
Loc::Stack if !val.ptr => {
|
||||
let stack = self.ci.nodes.new_node_nop(ty, Kind::Stck, [VOID, MEM]);
|
||||
|
@ -2504,7 +2452,7 @@ impl<'a> Codegen<'a> {
|
|||
let mut has_ptr_arg = false;
|
||||
for arg in args {
|
||||
let value = self.expr(arg)?;
|
||||
has_ptr_arg |= value.ty.has_pointers(&self.tys);
|
||||
has_ptr_arg |= value.ty.has_pointers(self.tys);
|
||||
self.tys.tmp.args.push(value.ty);
|
||||
debug_assert_ne!(self.ci.nodes[value.id].kind, Kind::Stre);
|
||||
self.ci.nodes.lock(value.id);
|
||||
|
@ -2532,7 +2480,7 @@ impl<'a> Codegen<'a> {
|
|||
});
|
||||
}
|
||||
|
||||
let alt_value = match ty.loc(&self.tys) {
|
||||
let alt_value = match ty.loc(self.tys) {
|
||||
Loc::Reg => None,
|
||||
Loc::Stack => {
|
||||
let stck = self.ci.nodes.new_node_nop(ty, Kind::Stck, [VOID, MEM]);
|
||||
|
@ -2550,7 +2498,6 @@ impl<'a> Codegen<'a> {
|
|||
|
||||
alt_value.or(Some(Value::new(self.ci.ctrl).ty(ty)))
|
||||
}
|
||||
//Expr::Directive { name: "inline", args: [func, args @ ..], .. }
|
||||
Expr::Call { func, args, .. } => {
|
||||
self.ci.call_count += 1;
|
||||
let ty = self.ty(func);
|
||||
|
@ -2588,11 +2535,11 @@ impl<'a> Codegen<'a> {
|
|||
let mut cargs = cargs.iter();
|
||||
let mut args = args.iter();
|
||||
let mut has_ptr_arg = false;
|
||||
while let Some(ty) = tys.next(&self.tys) {
|
||||
while let Some(ty) = tys.next(self.tys) {
|
||||
let carg = cargs.next().unwrap();
|
||||
let arg = args.next().unwrap();
|
||||
let Some(arg) = args.next() else { break };
|
||||
let Arg::Value(ty) = ty else { continue };
|
||||
has_ptr_arg |= ty.has_pointers(&self.tys);
|
||||
has_ptr_arg |= ty.has_pointers(self.tys);
|
||||
|
||||
let mut value = self.expr_ctx(arg, Ctx::default().with_ty(ty))?;
|
||||
debug_assert_ne!(self.ci.nodes[value.id].kind, Kind::Stre);
|
||||
|
@ -2621,7 +2568,7 @@ impl<'a> Codegen<'a> {
|
|||
});
|
||||
}
|
||||
|
||||
let alt_value = match sig.ret.loc(&self.tys) {
|
||||
let alt_value = match sig.ret.loc(self.tys) {
|
||||
Loc::Reg => None,
|
||||
Loc::Stack => {
|
||||
let stck = self.ci.nodes.new_node_nop(sig.ret, Kind::Stck, [VOID, MEM]);
|
||||
|
@ -2681,9 +2628,9 @@ impl<'a> Codegen<'a> {
|
|||
let mut args = args.iter();
|
||||
let mut cargs = cargs.iter();
|
||||
let var_base = self.ci.scope.vars.len();
|
||||
while let Some(aty) = tys.next(&self.tys) {
|
||||
let arg = args.next().unwrap();
|
||||
while let Some(aty) = tys.next(self.tys) {
|
||||
let carg = cargs.next().unwrap();
|
||||
let Some(arg) = args.next() else { break };
|
||||
match aty {
|
||||
Arg::Type(id) => {
|
||||
self.ci.scope.vars.push(Variable::new(
|
||||
|
@ -2769,9 +2716,9 @@ impl<'a> Codegen<'a> {
|
|||
match sty.expand() {
|
||||
ty::Kind::Struct(s) => {
|
||||
let mem = self.ci.nodes.new_node(sty, Kind::Stck, [VOID, MEM]);
|
||||
let mut offs = OffsetIter::new(s, &self.tys);
|
||||
let mut offs = OffsetIter::new(s, self.tys);
|
||||
for field in fields {
|
||||
let Some((ty, offset)) = offs.next_ty(&self.tys) else {
|
||||
let Some((ty, offset)) = offs.next_ty(self.tys) else {
|
||||
self.report(
|
||||
field.pos(),
|
||||
"this init argumen overflows the field count",
|
||||
|
@ -2787,7 +2734,7 @@ impl<'a> Codegen<'a> {
|
|||
}
|
||||
|
||||
let field_list = offs
|
||||
.into_iter(&self.tys)
|
||||
.into_iter(self.tys)
|
||||
.map(|(f, ..)| self.tys.names.ident_str(f.name))
|
||||
.intersperse(", ")
|
||||
.collect::<String>();
|
||||
|
@ -2877,8 +2824,8 @@ impl<'a> Codegen<'a> {
|
|||
};
|
||||
|
||||
// TODO: dont allocate
|
||||
let mut offs = OffsetIter::new(s, &self.tys)
|
||||
.into_iter(&self.tys)
|
||||
let mut offs = OffsetIter::new(s, self.tys)
|
||||
.into_iter(self.tys)
|
||||
.map(|(f, o)| (f.ty, o))
|
||||
.collect::<Vec<_>>();
|
||||
let mem = self.ci.nodes.new_node(sty, Kind::Stck, [VOID, MEM]);
|
||||
|
@ -3131,7 +3078,10 @@ impl<'a> Codegen<'a> {
|
|||
|
||||
Some(Value::VOID)
|
||||
}
|
||||
ref e => self.report_unhandled_ast(e, "bruh"),
|
||||
ref e => {
|
||||
self.report_unhandled_ast(e, "bruh");
|
||||
Some(Value::VOID)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3144,8 +3094,8 @@ impl<'a> Codegen<'a> {
|
|||
lhs: Nid,
|
||||
rhs: Nid,
|
||||
) -> bool {
|
||||
let mut offs = OffsetIter::new(s, &self.tys);
|
||||
while let Some((ty, off)) = offs.next_ty(&self.tys) {
|
||||
let mut offs = OffsetIter::new(s, self.tys);
|
||||
while let Some((ty, off)) = offs.next_ty(self.tys) {
|
||||
let lhs = self.offset(lhs, off);
|
||||
let rhs = self.offset(rhs, off);
|
||||
let dst = self.offset(dst, off);
|
||||
|
@ -3268,7 +3218,7 @@ impl<'a> Codegen<'a> {
|
|||
};
|
||||
|
||||
for &CtorField { pos, name, ref value } in fields {
|
||||
let Some((offset, ty)) = OffsetIter::offset_of(&self.tys, idx, name) else {
|
||||
let Some((offset, ty)) = OffsetIter::offset_of(self.tys, idx, name) else {
|
||||
self.report(pos, format_args!("field not found: {name:?}"));
|
||||
continue;
|
||||
};
|
||||
|
@ -3387,8 +3337,8 @@ impl<'a> Codegen<'a> {
|
|||
|
||||
fn complete_call_graph(&mut self) -> bool {
|
||||
let prev_err_len = self.errors.borrow().len();
|
||||
while self.ci.task_base < self.tasks.len()
|
||||
&& let Some(task_slot) = self.tasks.pop()
|
||||
while self.ci.task_base < self.tys.tasks.len()
|
||||
&& let Some(task_slot) = self.tys.tasks.pop()
|
||||
{
|
||||
let Some(task) = task_slot else { continue };
|
||||
self.emit_func(task);
|
||||
|
@ -3413,7 +3363,7 @@ impl<'a> Codegen<'a> {
|
|||
|
||||
let mut tys = sig.args.args();
|
||||
let mut args = args.iter();
|
||||
while let Some(aty) = tys.next(&self.tys) {
|
||||
while let Some(aty) = tys.next(self.tys) {
|
||||
let arg = args.next().unwrap();
|
||||
match aty {
|
||||
Arg::Type(ty) => {
|
||||
|
@ -3427,12 +3377,12 @@ impl<'a> Codegen<'a> {
|
|||
}
|
||||
Arg::Value(ty) => {
|
||||
let mut deps = Vc::from([VOID]);
|
||||
if ty.loc(&self.tys) == Loc::Stack && self.tys.size_of(ty) <= 16 {
|
||||
if ty.loc(self.tys) == Loc::Stack && self.tys.size_of(ty) <= 16 {
|
||||
deps.push(MEM);
|
||||
}
|
||||
// TODO: whe we not using the deps?
|
||||
let value = self.ci.nodes.new_node_nop(ty, Kind::Arg, deps);
|
||||
let ptr = ty.loc(&self.tys) == Loc::Stack;
|
||||
let ptr = ty.loc(self.tys) == Loc::Stack;
|
||||
self.ci.scope.vars.push(Variable::new(
|
||||
arg.id,
|
||||
ty,
|
||||
|
@ -3457,7 +3407,7 @@ impl<'a> Codegen<'a> {
|
|||
self.ci.finalize();
|
||||
|
||||
if self.errors.borrow().len() == prev_err_len {
|
||||
self.ci.emit_body(&mut self.tys, self.files, sig);
|
||||
self.ci.emit_body(self.tys, self.files, sig);
|
||||
self.tys.ins.funcs[id as usize].code.append(&mut self.ci.code);
|
||||
self.tys.ins.funcs[id as usize].relocs.append(&mut self.ci.relocs);
|
||||
}
|
||||
|
@ -3470,7 +3420,7 @@ impl<'a> Codegen<'a> {
|
|||
}
|
||||
|
||||
fn ty_display(&self, ty: ty::Id) -> ty::Display {
|
||||
ty::Display::new(&self.tys, self.files, ty)
|
||||
ty::Display::new(self.tys, self.files, ty)
|
||||
}
|
||||
|
||||
fn ast_display(&self, ast: &'a Expr<'a>) -> parser::Display<'a> {
|
||||
|
@ -3563,18 +3513,96 @@ impl<'a> Codegen<'a> {
|
|||
}
|
||||
|
||||
#[track_caller]
|
||||
fn report_unhandled_ast(&self, ast: &Expr, hint: impl Display) -> ! {
|
||||
fn report_unhandled_ast(&self, ast: &Expr, hint: impl Display) {
|
||||
log::info!("{ast:#?}");
|
||||
self.fatal_report(ast.pos(), fa!("compiler does not (yet) know how to handle ({hint})"));
|
||||
self.report(ast.pos(), fa!("compiler does not (yet) know how to handle ({hint})"));
|
||||
}
|
||||
|
||||
fn cfile(&self) -> &'a parser::Ast {
|
||||
&self.files[self.ci.file as usize]
|
||||
}
|
||||
}
|
||||
|
||||
fn fatal_report(&self, pos: Pos, msg: impl Display) -> ! {
|
||||
impl TypeParser for Codegen<'_> {
|
||||
fn tys(&mut self) -> &mut Types {
|
||||
self.tys
|
||||
}
|
||||
|
||||
fn eval_const(&mut self, file: FileId, expr: &Expr, ret: ty::Id) -> u64 {
|
||||
let mut scope = core::mem::take(&mut self.ci.scope.vars);
|
||||
self.pool.push_ci(file, Some(ret), self.tys.tasks.len(), &mut self.ci);
|
||||
self.ci.scope.vars = scope;
|
||||
|
||||
let prev_err_len = self.errors.borrow().len();
|
||||
|
||||
self.expr(&Expr::Return { pos: expr.pos(), val: Some(expr) });
|
||||
|
||||
scope = core::mem::take(&mut self.ci.scope.vars);
|
||||
self.ci.finalize();
|
||||
|
||||
let res = if self.errors.borrow().len() == prev_err_len {
|
||||
self.emit_and_eval(file, ret, &mut [])
|
||||
} else {
|
||||
1
|
||||
};
|
||||
|
||||
self.pool.pop_ci(&mut self.ci);
|
||||
self.ci.scope.vars = scope;
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
fn infer_type(&mut self, expr: &Expr) -> ty::Id {
|
||||
self.pool.save_ci(&self.ci);
|
||||
let ty = self.expr(expr).map_or(ty::Id::NEVER, |v| v.ty);
|
||||
self.pool.restore_ci(&mut self.ci);
|
||||
ty
|
||||
}
|
||||
|
||||
fn on_reuse(&mut self, existing: ty::Id) {
|
||||
if let ty::Kind::Func(id) = existing.expand()
|
||||
&& let func = &mut self.tys.ins.funcs[id as usize]
|
||||
&& let Err(idx) = task::unpack(func.offset)
|
||||
&& idx < self.tys.tasks.len()
|
||||
{
|
||||
func.offset = task::id(self.tys.tasks.len());
|
||||
let task = self.tys.tasks[idx].take();
|
||||
self.tys.tasks.push(task);
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_global(&mut self, file: FileId, name: Ident, expr: &Expr) -> ty::Id {
|
||||
let gid = self.tys.ins.globals.len() as ty::Global;
|
||||
self.tys.ins.globals.push(Global { file, name, ..Default::default() });
|
||||
|
||||
let ty = ty::Kind::Global(gid);
|
||||
self.pool.push_ci(file, None, self.tys.tasks.len(), &mut self.ci);
|
||||
let prev_err_len = self.errors.borrow().len();
|
||||
|
||||
self.expr(&(Expr::Return { pos: expr.pos(), val: Some(expr) }));
|
||||
|
||||
self.ci.finalize();
|
||||
|
||||
let ret = self.ci.ret.expect("for return type to be infered");
|
||||
if self.errors.borrow().len() == prev_err_len {
|
||||
let mut mem = vec![0u8; self.tys.size_of(ret) as usize];
|
||||
self.emit_and_eval(file, ret, &mut mem);
|
||||
self.tys.ins.globals[gid as usize].data = mem;
|
||||
}
|
||||
|
||||
self.pool.pop_ci(&mut self.ci);
|
||||
self.tys.ins.globals[gid as usize].ty = ret;
|
||||
|
||||
ty.compress()
|
||||
}
|
||||
|
||||
fn report(&self, pos: Pos, msg: impl Display) -> ty::Id {
|
||||
self.report(pos, msg);
|
||||
panic!("{}", self.errors.borrow());
|
||||
ty::Id::NEVER
|
||||
}
|
||||
|
||||
fn find_local_ty(&mut self, ident: Ident) -> Option<ty::Id> {
|
||||
self.ci.scope.vars.iter().rfind(|v| (v.id == ident && v.value() == NEVER)).map(|v| v.ty)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4373,17 +4401,108 @@ fn common_dom(mut a: Nid, mut b: Nid, nodes: &mut Nodes) -> Nid {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use {
|
||||
super::{Codegen, CodegenCtx},
|
||||
crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{self},
|
||||
},
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::fmt::Write,
|
||||
core::{fmt::Write, hash::BuildHasher, ops::Range},
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Rand(pub u64);
|
||||
|
||||
impl Rand {
|
||||
pub fn next(&mut self) -> u64 {
|
||||
self.0 = crate::FnvBuildHasher::default().hash_one(self.0);
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn range(&mut self, min: u64, max: u64) -> u64 {
|
||||
self.next() % (max - min) + min
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FuncGen {
|
||||
rand: Rand,
|
||||
buf: String,
|
||||
}
|
||||
|
||||
impl FuncGen {
|
||||
fn gen(&mut self, seed: u64) -> &str {
|
||||
self.rand = Rand(seed);
|
||||
self.buf.clear();
|
||||
self.buf.push_str("main := fn(): void { return ");
|
||||
self.expr().unwrap();
|
||||
self.buf.push('}');
|
||||
&self.buf
|
||||
}
|
||||
|
||||
fn expr(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..80 => {
|
||||
write!(self.buf, "{}", self.rand.next())
|
||||
}
|
||||
80..100 => {
|
||||
self.expr()?;
|
||||
let ops = [
|
||||
TokenKind::Add,
|
||||
TokenKind::Sub,
|
||||
TokenKind::Mul,
|
||||
TokenKind::Div,
|
||||
TokenKind::Shl,
|
||||
TokenKind::Eq,
|
||||
TokenKind::Ne,
|
||||
TokenKind::Lt,
|
||||
TokenKind::Gt,
|
||||
TokenKind::Le,
|
||||
TokenKind::Ge,
|
||||
TokenKind::Band,
|
||||
TokenKind::Bor,
|
||||
TokenKind::Xor,
|
||||
TokenKind::Mod,
|
||||
TokenKind::Shr,
|
||||
];
|
||||
let op = ops[self.rand.range(0, ops.len() as u64) as usize];
|
||||
write!(self.buf, " {op} ")?;
|
||||
self.expr()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fuzz(seed_range: Range<u64>) {
|
||||
let mut gen = FuncGen::default();
|
||||
let mut ctx = CodegenCtx::default();
|
||||
for i in seed_range {
|
||||
ctx.clear();
|
||||
let src = gen.gen(i);
|
||||
let parsed = parser::Ast::new("fuzz", src, &mut ctx.parser, &mut parser::no_loader);
|
||||
|
||||
let mut cdg = Codegen::new(core::slice::from_ref(&parsed), &mut ctx);
|
||||
cdg.generate(0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn fuzz_test() {
|
||||
_ = log::set_logger(&crate::fs::Logger);
|
||||
log::set_max_level(log::LevelFilter::Info);
|
||||
fuzz(0..10000);
|
||||
}
|
||||
|
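A note on the fuzzer above: `fuzz_test` is marked `#[ignore]`, so it never runs by default; with the standard libtest harness it can be run on demand (the package name `hblang` is an assumption based on the crate paths in this diff):

cargo test -p hblang fuzz_test -- --ignored

Each seed builds one random arithmetic expression over a fixed operator list, reuses a single `CodegenCtx` through `ctx.clear()`, and passes as long as parsing plus code generation terminate without panicking.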
||||
fn generate(ident: &'static str, input: &'static str, output: &mut String) {
|
||||
_ = log::set_logger(&crate::fs::Logger);
|
||||
// log::set_max_level(log::LevelFilter::Info);
|
||||
// log::set_max_level(log::LevelFilter::Trace);
|
||||
log::set_max_level(log::LevelFilter::Info);
|
||||
//log::set_max_level(log::LevelFilter::Trace);
|
||||
|
||||
let (ref files, embeds) = crate::test_parse_files(ident, input);
|
||||
let mut codegen = super::Codegen { files, ..Default::default() };
|
||||
let mut ctx = CodegenCtx::default();
|
||||
let (ref files, embeds) = crate::test_parse_files(ident, input, &mut ctx.parser);
|
||||
let mut codegen = super::Codegen::new(files, &mut ctx);
|
||||
codegen.push_embeds(embeds);
|
||||
|
||||
codegen.generate(0);
|
||||
|
|