fixing undescriptive error or not enough arguments

parent faa8dd2e6f
commit 517850f283
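The diff below moves the parser and codegen away from panicking on bad input (the old `report` ended in `unreachable!()`) toward returning `Option` from parsing routines and accumulating diagnostics in a shared error buffer (`parser::Ctx::errors`). The following is a minimal, self-contained sketch of that pattern, not the project's actual API: the names `Ctx`, `MiniParser`, `report`, and `expect` are hypothetical stand-ins that only mirror the shape of the change.

```rust
use std::cell::RefCell;
use std::fmt::Write;

// Hypothetical simplified context: errors are buffered instead of aborting.
#[derive(Default)]
struct Ctx {
    errors: RefCell<String>,
}

struct MiniParser<'a> {
    tokens: &'a [&'a str],
    pos: usize,
    ctx: &'a mut Ctx,
}

impl<'a> MiniParser<'a> {
    // Mirrors the new `report` shape: record a diagnostic, return None so the
    // caller can bail out with `?` instead of hitting a panic.
    fn report(&mut self, msg: &str) -> Option<&'a str> {
        writeln!(self.ctx.errors.get_mut(), "error at token {}: {msg}", self.pos).unwrap();
        None
    }

    // Mirrors `expect_advance`: yields the token on success, buffers an error otherwise.
    fn expect(&mut self, want: &str) -> Option<&'a str> {
        let got = self.tokens.get(self.pos).copied().unwrap_or("<eof>");
        self.pos += 1;
        if got == want { Some(got) } else { self.report(&format!("expected {want}, found {got}")) }
    }
}

fn main() {
    let tokens = ["(", "x", "]"];
    let mut ctx = Ctx::default();
    let mut p = MiniParser { tokens: &tokens, pos: 0, ctx: &mut ctx };
    let parsed = (|| -> Option<()> {
        p.expect("(")?;
        p.expect("x")?;
        p.expect(")")?; // fails on "]": the error is buffered and we return None
        Some(())
    })();
    assert!(parsed.is_none());
    drop(p);
    // The caller decides how to surface the accumulated diagnostics.
    print!("{}", ctx.errors.into_inner());
}
```

This is the same idea the commit applies throughout: `Parser::report` now writes into `ctx.errors` and returns `None`, and every `expect_advance`/`expr` call propagates that with `?`, so one undescriptive panic becomes an ordinary, reportable parse error.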
@@ -19,7 +19,7 @@ unsafe extern "C" fn fmt() {
    let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);

    let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
-    let mut ctx = parser::ParserCtx::default();
+    let mut ctx = parser::Ctx::default();
    let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);

    let mut f = wasm_rt::Write(&mut OUTPUT[..]);
@@ -53,7 +53,7 @@ unsafe fn compile_and_run(mut fuel: usize) {
    };

    let files = {
-        let mut ctx = hblang::parser::ParserCtx::default();
+        let mut ctx = hblang::parser::Ctx::default();
        let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
        let mut loader = |path: &str, _: &str, kind| match kind {
            hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap() as FileId),
@@ -728,7 +728,9 @@ sqrt := fn(x: uint): uint {
    g := 0
    b := 32768
    bshift := 15
-    loop if b == 0 break else {
+    loop if b == 0 {
+        break
+    } else {
        bshift -= 1
        temp = b + (g << 1)
        temp <<= bshift
@@ -2649,13 +2649,21 @@ impl Codegen {

#[cfg(test)]
mod tests {
-    use alloc::{string::String, vec::Vec};
+    use {
+        crate::parser,
+        alloc::{string::String, vec::Vec},
+    };

    fn generate(ident: &'static str, input: &'static str, output: &mut String) {
        _ = log::set_logger(&crate::fs::Logger);
        log::set_max_level(log::LevelFilter::Debug);

-        let (files, embeds) = crate::test_parse_files(ident, input);
+        let mut ctx = parser::Ctx::default();
+        let (files, embeds) = crate::test_parse_files(ident, input, &mut ctx);
+        if !ctx.errors.get_mut().is_empty() {
+            output.push_str(ctx.errors.get_mut());
+            return;
+        }
        let mut codegen = super::Codegen { files, ..Default::default() };
        codegen.push_embeds(embeds);

@@ -451,7 +451,7 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
#[cfg(test)]
pub mod test {
    use {
-        crate::parser::{self, ParserCtx},
+        crate::parser::{self, Ctx},
        alloc::borrow::ToOwned,
        std::{fmt::Write, string::String},
    };
@@ -461,8 +461,7 @@ pub mod test {
        let len = crate::fmt::minify(&mut minned);
        minned.truncate(len);

-        let ast =
-            parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut parser::no_loader);
+        let ast = parser::Ast::new(ident, minned, &mut Ctx::default(), &mut parser::no_loader);
        //log::error!(
        //    "{} / {} = {} | {} / {} = {}",
        //    ast.mem.size(),
@@ -1,7 +1,7 @@
use {
    crate::{
        codegen,
-        parser::{self, Ast, FileKind, ParserCtx},
+        parser::{self, Ast, Ctx, FileKind},
        son,
    },
    alloc::{string::String, vec::Vec},
@@ -89,10 +89,11 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
        let ast = parsed.ast.into_iter().next().unwrap();
        write!(out, "{ast}").unwrap();
    } else if options.optimize {
-        let mut codegen = son::Codegen::default();
-        codegen.files = &parsed.ast;
-        codegen.push_embeds(parsed.embeds);
+        let mut ctx = crate::son::CodegenCtx::default();
+        *ctx.parser.errors.get_mut() = parsed.errors;
+        let mut codegen = son::Codegen::new(&parsed.ast, &mut ctx);

+        codegen.push_embeds(parsed.embeds);
        codegen.generate(0);

        if !codegen.errors.borrow().is_empty() {
@@ -108,6 +109,11 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
            codegen.assemble(out);
        }
    } else {
+        if !parsed.errors.is_empty() {
+            log::error!("{}", parsed.errors);
+            return Err(std::io::Error::other("parsing failed"));
+        }
+
        let mut codegen = codegen::Codegen::default();
        codegen.files = parsed.ast;
        codegen.push_embeds(parsed.embeds);
@@ -213,6 +219,7 @@ impl<T> TaskQueueInner<T> {
pub struct Loaded {
    ast: Vec<Ast>,
    embeds: Vec<Vec<u8>>,
+    errors: String,
}

pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
@@ -334,7 +341,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
    };

    let thread = || {
-        let mut ctx = ParserCtx::default();
+        let mut ctx = Ctx::default();
        let mut tmp = PathBuf::new();
        while let Some(task @ (indx, ..)) = tasks.pop() {
            let res = execute_task(&mut ctx, task, &mut tmp);
@@ -343,6 +350,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
            ast.resize_with(len, || Err(io::ErrorKind::InvalidData.into()));
            ast[indx as usize] = res;
        }
+        ctx.errors.into_inner()
    };

    let path = Path::new(root).canonicalize().map_err(|e| {
@@ -351,15 +359,23 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
    seen_modules.lock().unwrap().insert(path.clone(), 0);
    tasks.push((0, path));

-    if extra_threads == 0 {
-        thread();
+    let errors = if extra_threads == 0 {
+        thread()
    } else {
-        std::thread::scope(|s| (0..extra_threads + 1).for_each(|_| _ = s.spawn(thread)));
-    }
+        std::thread::scope(|s| {
+            (0..extra_threads + 1)
+                .map(|_| s.spawn(thread))
+                .collect::<Vec<_>>()
+                .into_iter()
+                .map(|t| t.join().unwrap())
+                .collect::<String>()
+        })
+    };

    Ok(Loaded {
        ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
        embeds: embeds.into_inner().unwrap(),
+        errors,
    })
}

@@ -51,6 +51,8 @@ macro_rules! gen_token_kind {
        }

        impl $name {
+            pub const OPS: &[Self] = &[$($(Self::$op),*),*];
+
            pub fn name(&self) -> &str {
                let sf = unsafe { &*(self as *const _ as *const u8) } ;
                match *self {
@@ -279,11 +281,16 @@ impl TokenKind {
            Self::Shl => a.wrapping_shl(b as _),
            Self::Eq => (a == b) as i64,
            Self::Ne => (a != b) as i64,
+            Self::Lt => (a < b) as i64,
+            Self::Gt => (a > b) as i64,
+            Self::Le => (a >= b) as i64,
+            Self::Ge => (a <= b) as i64,
            Self::Band => a & b,
            Self::Bor => a | b,
            Self::Xor => a ^ b,
-            Self::Mod => a % b,
-            Self::Shr => a >> b,
+            Self::Mod if b == 0 => 0,
+            Self::Mod => a.wrapping_rem(b),
+            Self::Shr => a.wrapping_shr(b as _),
            s => todo!("{s}"),
        }
    }
@@ -316,6 +323,17 @@ impl TokenKind {
            s => todo!("{s}"),
        }
    }
+
+    pub fn closing(&self) -> Option<TokenKind> {
+        Some(match self {
+            Self::Ctor => Self::RBrace,
+            Self::Tupl => Self::RParen,
+            Self::LParen => Self::RParen,
+            Self::LBrack => Self::RBrack,
+            Self::LBrace => Self::RBrace,
+            _ => return None,
+        })
+    }
}

gen_token_kind! {
@@ -930,6 +930,11 @@ impl IdentInterner {
    fn project(&self, ident: &str) -> Option<Ident> {
        self.lookup.get(ident, &self.strings).copied()
    }
+
+    fn clear(&mut self) {
+        self.lookup.clear();
+        self.strings.clear()
+    }
}

#[derive(Default)]
@@ -946,20 +951,36 @@ pub struct TypeIns {
    funcs: Vec<Func>,
    args: Vec<ty::Id>,
    globals: Vec<Global>,
-    // TODO: use ctx map
-    strings: HashMap<Vec<u8>, ty::Global>,
    structs: Vec<Struct>,
    fields: Vec<Field>,
    ptrs: Vec<Ptr>,
    slices: Vec<Array>,
}

+struct FTask {
+    file: FileId,
+    id: ty::Func,
+}
+
+struct StringRef(ty::Global);
+
+impl ctx_map::CtxEntry for StringRef {
+    type Ctx = [Global];
+    type Key<'a> = &'a [u8];
+
+    fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
+        &ctx[self.0 as usize].data
+    }
+}
+
#[derive(Default)]
struct Types {
    syms: ctx_map::CtxMap<ty::Id>,
    names: IdentInterner,
+    strings: ctx_map::CtxMap<StringRef>,
    ins: TypeIns,
    tmp: TypesTmp,
+    tasks: Vec<Option<FTask>>,
}

const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
@@ -1444,6 +1465,28 @@ impl Types {
        let name = self.names.project(name)?;
        self.struct_fields(s).iter().position(|f| f.name == name)
    }
+
+    fn clear(&mut self) {
+        self.syms.clear();
+        self.names.clear();
+        self.strings.clear();
+
+        self.ins.funcs.clear();
+        self.ins.args.clear();
+        self.ins.globals.clear();
+        self.ins.structs.clear();
+        self.ins.fields.clear();
+        self.ins.ptrs.clear();
+        self.ins.slices.clear();
+
+        debug_assert_eq!(self.tmp.fields.len(), 0);
+        debug_assert_eq!(self.tmp.frontier.len(), 0);
+        debug_assert_eq!(self.tmp.globals.len(), 0);
+        debug_assert_eq!(self.tmp.funcs.len(), 0);
+        debug_assert_eq!(self.tmp.args.len(), 0);
+
+        debug_assert_eq!(self.tasks.len(), 0);
+    }
}

struct OffsetIter {
@@ -1559,6 +1602,10 @@ impl Comptime {
    fn pop_pc(&mut self, prev_pc: hbvm::mem::Address) {
        self.vm.pc = prev_pc + self.code.as_ptr() as usize;
    }
+
+    fn clear(&mut self) {
+        self.code.clear();
+    }
}

impl Default for Comptime {
@@ -1641,13 +1688,17 @@ pub fn run_test(
}

#[cfg(test)]
-fn test_parse_files(ident: &'static str, input: &'static str) -> (Vec<parser::Ast>, Vec<Vec<u8>>) {
+fn test_parse_files(
+    ident: &str,
+    input: &str,
+    ctx: &mut parser::Ctx,
+) -> (Vec<parser::Ast>, Vec<Vec<u8>>) {
    use {
        self::parser::FileKind,
        std::{borrow::ToOwned, string::ToString},
    };

-    fn find_block(mut input: &'static str, test_name: &'static str) -> &'static str {
+    fn find_block<'a>(mut input: &'a str, test_name: &str) -> &'a str {
        const CASE_PREFIX: &str = "#### ";
        const CASE_SUFFIX: &str = "\n```hb";
        loop {
|
||||||
.ok_or("Embed Not Found".to_string()),
|
.ok_or("Embed Not Found".to_string()),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut ctx = parser::ParserCtx::default();
|
|
||||||
(
|
(
|
||||||
module_map
|
module_map
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&(path, content)| {
|
.map(|&(path, content)| parser::Ast::new(path, content.to_owned(), ctx, &mut loader))
|
||||||
parser::Ast::new(path, content.to_owned(), &mut ctx, &mut loader)
|
|
||||||
})
|
|
||||||
.collect(),
|
.collect(),
|
||||||
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
|
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
|
||||||
)
|
)
|
||||||
|
|
|
@@ -7,7 +7,7 @@ use {
    alloc::{boxed::Box, string::String, vec::Vec},
    core::{
        alloc::Layout,
-        cell::UnsafeCell,
+        cell::{RefCell, UnsafeCell},
        fmt::{self},
        intrinsics::unlikely,
        marker::PhantomData,
@@ -19,7 +19,6 @@ use {

pub type Pos = u32;
pub type IdentFlags = u32;
-pub type Symbols = Vec<Symbol>;
pub type FileId = u32;
pub type IdentIndex = u16;
pub type LoaderError = String;
@@ -31,6 +30,20 @@ pub enum FileKind {
    Embed,
}

+trait Trans {
+    fn trans(self) -> Self;
+}
+
+impl<T> Trans for Option<Option<T>> {
+    fn trans(self) -> Self {
+        match self {
+            Some(None) => None,
+            Some(Some(v)) => Some(Some(v)),
+            None => Some(None),
+        }
+    }
+}
+
pub const SOURCE_TO_AST_FACTOR: usize = 7 * (core::mem::size_of::<usize>() / 4) + 1;

pub mod idfl {
@@ -73,7 +86,7 @@ pub struct Parser<'a, 'b> {
    loader: Loader<'b>,
    lexer: Lexer<'a>,
    arena: &'a Arena,
-    ctx: &'b mut ParserCtx,
+    ctx: &'b mut Ctx,
    token: Token,
    ns_bound: usize,
    trailing_sep: bool,
@@ -82,7 +95,7 @@ pub struct Parser<'a, 'b> {

impl<'a, 'b> Parser<'a, 'b> {
    pub fn parse(
-        ctx: &'b mut ParserCtx,
+        ctx: &'b mut Ctx,
        input: &'a str,
        path: &'b str,
        loader: Loader<'b>,
@@ -110,23 +123,17 @@ impl<'a, 'b> Parser<'a, 'b> {

        if !self.ctx.idents.is_empty() {
            // TODO: we need error recovery
-            log::error!("{}", {
-                let mut errors = String::new();
-                for id in self.ctx.idents.drain(..) {
-                    report_to(
-                        self.lexer.source(),
-                        self.path,
+            let mut idents = core::mem::take(&mut self.ctx.idents);
+            for id in idents.drain(..) {
+                self.report(
                    ident::pos(id.ident),
-                        &format_args!(
+                    format_args!(
                        "undeclared identifier: {}",
                        self.lexer.slice(ident::range(id.ident))
                    ),
-                        &mut errors,
                );
            }
-                errors
-            });
-            unreachable!();
+            self.ctx.idents = idents;
        }

        f
@@ -136,20 +143,20 @@ impl<'a, 'b> Parser<'a, 'b> {
        core::mem::replace(&mut self.token, self.lexer.eat())
    }

-    fn ptr_expr(&mut self) -> &'a Expr<'a> {
-        self.arena.alloc(self.expr())
+    fn ptr_expr(&mut self) -> Option<&'a Expr<'a>> {
+        Some(self.arena.alloc(self.expr()?))
    }

-    fn expr_low(&mut self, top_level: bool) -> Expr<'a> {
-        let left = self.unit_expr();
+    fn expr_low(&mut self, top_level: bool) -> Option<Expr<'a>> {
+        let left = self.unit_expr()?;
        self.bin_expr(left, 0, top_level)
    }

-    fn expr(&mut self) -> Expr<'a> {
+    fn expr(&mut self) -> Option<Expr<'a>> {
        self.expr_low(false)
    }

-    fn bin_expr(&mut self, mut fold: Expr<'a>, min_prec: u8, top_level: bool) -> Expr<'a> {
+    fn bin_expr(&mut self, mut fold: Expr<'a>, min_prec: u8, top_level: bool) -> Option<Expr<'a>> {
        loop {
            let Some(prec) = self.token.kind.precedence() else {
                break;
@@ -165,8 +172,8 @@ impl<'a, 'b> Parser<'a, 'b> {
                self.declare_rec(&fold, top_level);
            }

-            let right = self.unit_expr();
-            let right = self.bin_expr(right, prec, false);
+            let right = self.unit_expr()?;
+            let right = self.bin_expr(right, prec, false)?;
            let right = self.arena.alloc(right);
            let left = self.arena.alloc(fold);

@@ -187,7 +194,7 @@ impl<'a, 'b> Parser<'a, 'b> {
            }
        }

-        fold
+        Some(fold)
    }

    fn declare_rec(&mut self, expr: &Expr, top_level: bool) {
@@ -200,7 +207,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                    self.declare_rec(value, top_level)
                }
            }
-            _ => self.report(expr.pos(), "cant declare this shit (yet)"),
+            _ => _ = self.report(expr.pos(), "cant declare this shit (yet)"),
        }
    }

|
||||||
|
|
||||||
let Ok(index) = self.ctx.idents.binary_search_by_key(&id, |s| s.ident) else {
|
let Ok(index) = self.ctx.idents.binary_search_by_key(&id, |s| s.ident) else {
|
||||||
self.report(pos, "the identifier is rezerved for a builtin (proably)");
|
self.report(pos, "the identifier is rezerved for a builtin (proably)");
|
||||||
|
return;
|
||||||
};
|
};
|
||||||
if core::mem::replace(&mut self.ctx.idents[index].declared, true) {
|
if core::mem::replace(&mut self.ctx.idents[index].declared, true) {
|
||||||
self.report(
|
self.report(
|
||||||
pos,
|
pos,
|
||||||
format_args!("redeclaration of identifier: {}", self.lexer.slice(ident::range(id))),
|
format_args!("redeclaration of identifier: {}", self.lexer.slice(ident::range(id))),
|
||||||
)
|
);
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
self.ctx.idents[index].ordered = ordered;
|
self.ctx.idents[index].ordered = ordered;
|
||||||
|
@@ -245,11 +254,16 @@ impl<'a, 'b> Parser<'a, 'b> {
        {
            Some((i, elem)) => (i, elem, false),
            None => {
-                let Some(id) = ident::new(token.start, name.len() as _) else {
+                let ident = match ident::new(token.start, name.len() as _) {
+                    None => {
                        self.report(token.start, "identifier can at most have 64 characters");
+                        ident::new(token.start, 64).unwrap()
+                    }
+                    Some(id) => id,
                };
+
                self.ctx.idents.push(ScopeIdent {
-                    ident: id,
+                    ident,
                    declared: false,
                    ordered: false,
                    flags: 0,
@@ -271,18 +285,18 @@ impl<'a, 'b> Parser<'a, 'b> {
        self.lexer.slice(range.range())
    }

-    fn unit_expr(&mut self) -> Expr<'a> {
+    fn unit_expr(&mut self) -> Option<Expr<'a>> {
        use {Expr as E, TokenKind as T};
        let frame = self.ctx.idents.len();
        let token @ Token { start: pos, .. } = self.next();
        let prev_boundary = self.ns_bound;
        let prev_captured = self.ctx.captured.len();
        let mut expr = match token.kind {
-            T::Ct => E::Ct { pos, value: self.ptr_expr() },
+            T::Ct => E::Ct { pos, value: self.ptr_expr()? },
            T::Directive if self.lexer.slice(token.range()) == "use" => {
-                self.expect_advance(TokenKind::LParen);
-                let str = self.expect_advance(TokenKind::DQuote);
-                self.expect_advance(TokenKind::RParen);
+                self.expect_advance(TokenKind::LParen)?;
+                let str = self.expect_advance(TokenKind::DQuote)?;
+                self.expect_advance(TokenKind::RParen)?;
                let path = self.lexer.slice(str.range());
                let path = &path[1..path.len() - 1];

@@ -292,15 +306,15 @@ impl<'a, 'b> Parser<'a, 'b> {
                    id: match (self.loader)(path, self.path, FileKind::Module) {
                        Ok(id) => id,
                        Err(e) => {
-                            self.report(str.start, format_args!("error loading dependency: {e:#}"))
+                            self.report(str.start, format_args!("error loading dependency: {e:#}"))?
                        }
                    },
                }
            }
            T::Directive if self.lexer.slice(token.range()) == "embed" => {
-                self.expect_advance(TokenKind::LParen);
-                let str = self.expect_advance(TokenKind::DQuote);
-                self.expect_advance(TokenKind::RParen);
+                self.expect_advance(TokenKind::LParen)?;
+                let str = self.expect_advance(TokenKind::DQuote)?;
+                self.expect_advance(TokenKind::RParen)?;
                let path = self.lexer.slice(str.range());
                let path = &path[1..path.len() - 1];

@@ -309,8 +323,10 @@ impl<'a, 'b> Parser<'a, 'b> {
                    path,
                    id: match (self.loader)(path, self.path, FileKind::Embed) {
                        Ok(id) => id,
-                        Err(e) => self
-                            .report(str.start, format_args!("error loading embedded file: {e:#}")),
+                        Err(e) => self.report(
+                            str.start,
+                            format_args!("error loading embedded file: {e:#}"),
+                        )?,
                    },
                }
            }
@@ -318,7 +334,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                pos: pos - 1, // need to undo the directive shift
                name: self.tok_str(token),
                args: {
-                    self.expect_advance(T::LParen);
+                    self.expect_advance(T::LParen)?;
                    self.collect_list(T::Comma, T::RParen, Self::expr)
                },
            },
@@ -328,7 +344,7 @@ impl<'a, 'b> Parser<'a, 'b> {
            T::DQuote => E::String { pos, literal: self.tok_str(token) },
            T::Packed => {
                self.packed = true;
-                let expr = self.unit_expr();
+                let expr = self.unit_expr()?;
                if self.packed {
                    self.report(
                        expr.pos(),
@@ -342,20 +358,20 @@ impl<'a, 'b> Parser<'a, 'b> {
                packed: core::mem::take(&mut self.packed),
                fields: {
                    self.ns_bound = self.ctx.idents.len();
-                    self.expect_advance(T::LBrace);
+                    self.expect_advance(T::LBrace)?;
                    self.collect_list(T::Comma, T::RBrace, |s| {
                        let tok = s.token;
-                        if s.advance_if(T::Comment) {
+                        Some(if s.advance_if(T::Comment) {
                            CommentOr::Comment { literal: s.tok_str(tok), pos: tok.start }
                        } else {
-                            let name = s.expect_advance(T::Ident);
-                            s.expect_advance(T::Colon);
+                            let name = s.expect_advance(T::Ident)?;
+                            s.expect_advance(T::Colon)?;
                            CommentOr::Or(StructField {
                                pos: name.start,
                                name: s.tok_str(name),
-                                ty: s.expr(),
+                                ty: s.expr()?,
                            })
-                        }
+                        })
                    })
                },
                captured: {
@@ -381,11 +397,11 @@ impl<'a, 'b> Parser<'a, 'b> {
            }
            T::If => E::If {
                pos,
-                cond: self.ptr_expr(),
-                then: self.ptr_expr(),
-                else_: self.advance_if(T::Else).then(|| self.ptr_expr()),
+                cond: self.ptr_expr()?,
+                then: self.ptr_expr()?,
+                else_: self.advance_if(T::Else).then(|| self.ptr_expr()).trans()?,
            },
-            T::Loop => E::Loop { pos, body: self.ptr_expr() },
+            T::Loop => E::Loop { pos, body: self.ptr_expr()? },
            T::Break => E::Break { pos },
            T::Continue => E::Continue { pos },
            T::Return => E::Return {
@@ -394,39 +410,40 @@ impl<'a, 'b> Parser<'a, 'b> {
                    self.token.kind,
                    T::Semi | T::RBrace | T::RBrack | T::RParen | T::Comma
                ))
-                .then(|| self.ptr_expr()),
+                .then(|| self.ptr_expr())
+                .trans()?,
            },
            T::Fn => E::Closure {
                pos,
                args: {
-                    self.expect_advance(T::LParen);
+                    self.expect_advance(T::LParen)?;
                    self.collect_list(T::Comma, T::RParen, |s| {
-                        let name = s.advance_ident();
+                        let name = s.advance_ident()?;
                        let (id, _) = s.resolve_ident(name);
                        s.declare(name.start, id, true, true);
-                        s.expect_advance(T::Colon);
-                        Arg {
+                        s.expect_advance(T::Colon)?;
+                        Some(Arg {
                            pos: name.start,
                            name: s.tok_str(name),
                            is_ct: name.kind == T::CtIdent,
                            id,
-                            ty: s.expr(),
-                        }
+                            ty: s.expr()?,
+                        })
                    })
                },
                ret: {
-                    self.expect_advance(T::Colon);
-                    self.ptr_expr()
+                    self.expect_advance(T::Colon)?;
+                    self.ptr_expr()?
                },
-                body: self.ptr_expr(),
+                body: self.ptr_expr()?,
            },
            T::Ctor => self.ctor(pos, None),
            T::Tupl => self.tupl(pos, None),
            T::LBrack => E::Slice {
-                item: self.ptr_unit_expr(),
-                size: self.advance_if(T::Semi).then(|| self.ptr_expr()),
+                item: self.ptr_unit_expr()?,
+                size: self.advance_if(T::Semi).then(|| self.ptr_expr()).trans()?,
                pos: {
-                    self.expect_advance(T::RBrack);
+                    self.expect_advance(T::RBrack)?;
                    pos
                },
            },
@@ -434,7 +451,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                pos,
                op: token.kind,
                val: {
-                    let expr = self.ptr_unit_expr();
+                    let expr = self.ptr_unit_expr()?;
                    if token.kind == T::Band {
                        self.flag_idents(*expr, idfl::REFERENCED);
                    }
@@ -454,18 +471,18 @@ impl<'a, 'b> Parser<'a, 'b> {
                    pos,
                    value: match u64::from_str_radix(slice, radix as u32) {
                        Ok(value) => value,
-                        Err(e) => self.report(token.start, format_args!("invalid number: {e}")),
+                        Err(e) => self.report(token.start, format_args!("invalid number: {e}"))?,
                    } as i64,
                    radix,
                }
            }
            T::LParen => {
-                let expr = self.expr();
-                self.expect_advance(T::RParen);
+                let expr = self.expr()?;
+                self.expect_advance(T::RParen)?;
                expr
            }
            T::Comment => Expr::Comment { pos, literal: self.tok_str(token) },
-            tok => self.report(token.start, format_args!("unexpected token: {tok}")),
+            tok => self.report(token.start, format_args!("unexpected token: {tok}"))?,
        };

        loop {
@@ -485,8 +502,8 @@ impl<'a, 'b> Parser<'a, 'b> {
                T::LBrack => E::Index {
                    base: self.arena.alloc(expr),
                    index: {
-                        let index = self.expr();
-                        self.expect_advance(T::RBrack);
+                        let index = self.expr()?;
+                        self.expect_advance(T::RBrack)?;
                        self.arena.alloc(index)
                    },
                },
@@ -494,7 +511,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                    target: self.arena.alloc(expr),
                    pos: token.start,
                    name: {
-                        let token = self.expect_advance(T::Ident);
+                        let token = self.expect_advance(T::Ident)?;
                        self.tok_str(token)
                    },
                },
@@ -506,7 +523,7 @@ impl<'a, 'b> Parser<'a, 'b> {
            self.pop_scope(frame);
        }

-        expr
+        Some(expr)
    }

    fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>) -> Expr<'a> {
|
||||||
pos,
|
pos,
|
||||||
ty: ty.map(|ty| self.arena.alloc(ty)),
|
ty: ty.map(|ty| self.arena.alloc(ty)),
|
||||||
fields: self.collect_list(TokenKind::Comma, TokenKind::RBrace, |s| {
|
fields: self.collect_list(TokenKind::Comma, TokenKind::RBrace, |s| {
|
||||||
let name_tok = s.advance_ident();
|
let name_tok = s.advance_ident()?;
|
||||||
let name = s.tok_str(name_tok);
|
let name = s.tok_str(name_tok);
|
||||||
CtorField {
|
Some(CtorField {
|
||||||
pos: name_tok.start,
|
pos: name_tok.start,
|
||||||
name,
|
name,
|
||||||
value: if s.advance_if(TokenKind::Colon) {
|
value: if s.advance_if(TokenKind::Colon) {
|
||||||
s.expr()
|
s.expr()?
|
||||||
} else {
|
} else {
|
||||||
let (id, is_first) = s.resolve_ident(name_tok);
|
let (id, is_first) = s.resolve_ident(name_tok);
|
||||||
Expr::Ident { pos: name_tok.start, is_ct: false, id, is_first }
|
Expr::Ident { pos: name_tok.start, is_ct: false, id, is_first }
|
||||||
},
|
},
|
||||||
}
|
})
|
||||||
}),
|
}),
|
||||||
trailing_comma: core::mem::take(&mut self.trailing_sep),
|
trailing_comma: core::mem::take(&mut self.trailing_sep),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn advance_ident(&mut self) -> Token {
|
fn advance_ident(&mut self) -> Option<Token> {
|
||||||
if matches!(self.token.kind, TokenKind::Ident | TokenKind::CtIdent) {
|
let next = self.next();
|
||||||
self.next()
|
if matches!(next.kind, TokenKind::Ident | TokenKind::CtIdent) {
|
||||||
|
Some(next)
|
||||||
} else {
|
} else {
|
||||||
self.report(
|
self.report(self.token.start, format_args!("expected identifier, found {}", next.kind))?
|
||||||
self.token.start,
|
|
||||||
format_args!("expected identifier, found {}", self.token.kind),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -567,20 +582,49 @@ impl<'a, 'b> Parser<'a, 'b> {
|
||||||
.collect_into(&mut self.ctx.symbols);
|
.collect_into(&mut self.ctx.symbols);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ptr_unit_expr(&mut self) -> &'a Expr<'a> {
|
fn ptr_unit_expr(&mut self) -> Option<&'a Expr<'a>> {
|
||||||
self.arena.alloc(self.unit_expr())
|
Some(self.arena.alloc(self.unit_expr()?))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn collect_list<T: Copy>(
|
fn collect_list<T: Copy>(
|
||||||
&mut self,
|
&mut self,
|
||||||
delim: TokenKind,
|
delim: TokenKind,
|
||||||
end: TokenKind,
|
end: TokenKind,
|
||||||
mut f: impl FnMut(&mut Self) -> T,
|
mut f: impl FnMut(&mut Self) -> Option<T>,
|
||||||
) -> &'a [T] {
|
) -> &'a [T] {
|
||||||
let mut trailing_sep = false;
|
let mut trailing_sep = false;
|
||||||
let mut view = self.ctx.stack.view();
|
let mut view = self.ctx.stack.view();
|
||||||
while !self.advance_if(end) {
|
'o: while !self.advance_if(end) {
|
||||||
let val = f(self);
|
let val = match f(self) {
|
||||||
|
Some(val) => val,
|
||||||
|
None => {
|
||||||
|
let mut paren = None::<TokenKind>;
|
||||||
|
let mut depth = 0;
|
||||||
|
loop {
|
||||||
|
let tok = self.next();
|
||||||
|
if tok.kind == TokenKind::Eof {
|
||||||
|
break 'o;
|
||||||
|
}
|
||||||
|
if let Some(par) = paren {
|
||||||
|
if par == tok.kind {
|
||||||
|
depth += 1;
|
||||||
|
} else if tok.kind.closing() == par.closing() {
|
||||||
|
depth -= 1;
|
||||||
|
if depth == 0 {
|
||||||
|
paren = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if tok.kind == delim {
|
||||||
|
continue 'o;
|
||||||
|
} else if tok.kind == end {
|
||||||
|
break 'o;
|
||||||
|
} else if tok.kind.closing().is_some() && paren.is_none() {
|
||||||
|
paren = Some(tok.kind);
|
||||||
|
depth = 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
trailing_sep = self.advance_if(delim);
|
trailing_sep = self.advance_if(delim);
|
||||||
unsafe { self.ctx.stack.push(&mut view, val) };
|
unsafe { self.ctx.stack.push(&mut view, val) };
|
||||||
}
|
}
|
||||||
|
@@ -597,20 +641,28 @@ impl<'a, 'b> Parser<'a, 'b> {
        }
    }

-    fn expect_advance(&mut self, kind: TokenKind) -> Token {
-        if self.token.kind != kind {
-            self.report(
-                self.token.start,
-                format_args!("expected {}, found {}", kind, self.token.kind),
-            );
+    #[must_use]
+    fn expect_advance(&mut self, kind: TokenKind) -> Option<Token> {
+        let next = self.next();
+        if next.kind != kind {
+            self.report(next.start, format_args!("expected {}, found {}", kind, next.kind))?
+        } else {
+            Some(next)
        }
-        self.next()
    }

    #[track_caller]
-    fn report(&self, pos: Pos, msg: impl fmt::Display) -> ! {
-        log::error!("{}", Report::new(self.lexer.source(), self.path, pos, msg));
-        unreachable!();
+    fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
+        if log::log_enabled!(log::Level::Error) {
+            use core::fmt::Write;
+            writeln!(
+                self.ctx.errors.get_mut(),
+                "{}",
+                Report::new(self.lexer.source(), self.path, pos, msg)
+            )
+            .unwrap();
+        }
+        None
    }

    fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) {
@@ -988,13 +1040,25 @@ impl core::fmt::Display for Display<'_> {
}

#[derive(Default)]
-pub struct ParserCtx {
-    symbols: Symbols,
+pub struct Ctx {
+    pub errors: RefCell<String>,
+    symbols: Vec<Symbol>,
    stack: StackAlloc,
    idents: Vec<ScopeIdent>,
    captured: Vec<Ident>,
}

+impl Ctx {
+    pub fn clear(&mut self) {
+        self.errors.get_mut().clear();
+
+        debug_assert_eq!(self.symbols.len(), 0);
+        debug_assert_eq!(self.stack.len, 0);
+        debug_assert_eq!(self.idents.len(), 0);
+        debug_assert_eq!(self.captured.len(), 0);
+    }
+}
+
#[repr(C)]
pub struct AstInner<T: ?Sized> {
    ref_count: AtomicUsize,
@@ -1014,12 +1078,12 @@ impl AstInner<[Symbol]> {
            .0
    }

-    fn new(file: Box<str>, path: &str, ctx: &mut ParserCtx, loader: Loader) -> NonNull<Self> {
+    fn new(file: Box<str>, path: Box<str>, ctx: &mut Ctx, loader: Loader) -> NonNull<Self> {
        let arena = Arena::with_capacity(
            SOURCE_TO_AST_FACTOR * file.bytes().filter(|b| !b.is_ascii_whitespace()).count(),
        );
        let exprs =
-            unsafe { core::mem::transmute(Parser::parse(ctx, &file, path, loader, &arena)) };
+            unsafe { core::mem::transmute(Parser::parse(ctx, &file, &path, loader, &arena)) };

        crate::quad_sort(&mut ctx.symbols, |a, b| a.name.cmp(&b.name));

@@ -1033,13 +1097,14 @@ impl AstInner<[Symbol]> {
            ref_count: AtomicUsize::new(1),
            mem: arena.chunk.into_inner(),
            exprs,
-            path: path.into(),
+            path,
            file,
            symbols: (),
        });
        core::ptr::addr_of_mut!((*inner).symbols)
            .as_mut_ptr()
            .copy_from_nonoverlapping(ctx.symbols.as_ptr(), ctx.symbols.len());
+        ctx.symbols.clear();

        NonNull::new_unchecked(inner)
    }
@@ -1090,8 +1155,13 @@ fn report_to(file: &str, path: &str, pos: Pos, msg: &dyn fmt::Display, out: &mut
pub struct Ast(NonNull<AstInner<[Symbol]>>);

impl Ast {
-    pub fn new(path: &str, content: String, ctx: &mut ParserCtx, loader: Loader) -> Self {
-        Self(AstInner::new(content.into(), path, ctx, loader))
+    pub fn new(
+        path: impl Into<Box<str>>,
+        content: impl Into<Box<str>>,
+        ctx: &mut Ctx,
+        loader: Loader,
+    ) -> Self {
+        Self(AstInner::new(content.into(), path.into(), ctx, loader))
    }

    pub fn exprs(&self) -> &[Expr] {
@@ -1118,7 +1188,7 @@ impl Ast {

impl Default for Ast {
    fn default() -> Self {
-        Self(AstInner::new("".into(), "", &mut ParserCtx::default(), &mut no_loader))
+        Self(AstInner::new("".into(), "".into(), &mut Ctx::default(), &mut no_loader))
    }
}

@@ -1327,6 +1397,15 @@ impl Arena {

        chunk.alloc(layout).unwrap()
    }
+
+    pub fn clear(&mut self) {
+        let size = self.chunk.get_mut().size();
+        if self.chunk.get_mut().next().is_some() {
+            self.chunk = ArenaChunk::new(size + 1024, Default::default()).into();
+        } else {
+            self.chunk.get_mut().reset();
+        }
+    }
}

pub struct ArenaChunk {
@@ -1385,6 +1464,10 @@ impl ArenaChunk {
    pub fn size(&self) -> usize {
        self.base as usize + self.size - self.end as usize + self.next().map_or(0, Self::size)
    }
+
+    fn reset(&mut self) {
+        self.end = unsafe { self.base.add(self.size) };
+    }
}

impl Drop for ArenaChunk {
lang/src/son.rs (395 changed lines)
@@ -14,8 +14,8 @@ use {
        reg, task,
        ty::{self, Arg, ArrayLen, Loc, Tuple},
        vc::{BitSet, Vc},
-        Comptime, Func, Global, HashMap, Offset, OffsetIter, PLoc, Reloc, Sig, SymKey, TypeParser,
-        TypedReloc, Types,
+        Comptime, FTask, Func, Global, HashMap, Offset, OffsetIter, PLoc, Reloc, Sig, StringRef,
+        SymKey, TypeParser, TypedReloc, Types,
    },
    alloc::{borrow::ToOwned, string::String, vec::Vec},
    core::{
@@ -1707,11 +1707,6 @@ fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
    doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}

-struct FTask {
-    file: FileId,
-    id: ty::Func,
-}
-
#[derive(Default, Debug)]
struct Ctx {
    ty: Option<ty::Id>,
@@ -1727,6 +1722,9 @@ impl Ctx {
struct Pool {
    cis: Vec<ItemCtx>,
    used_cis: usize,
+
+    #[expect(dead_code)]
+    ralloc: Regalloc,
}

impl Pool {
@@ -1766,6 +1764,10 @@ impl Pool {
        dst.scope.clear(&mut dst.nodes);
        *dst = core::mem::take(&mut self.cis[self.used_cis]);
    }
+
+    fn clear(&mut self) {
+        debug_assert_eq!(self.used_cis, 0);
+    }
}

struct Regalloc {
@@ -1827,109 +1829,49 @@ impl Value {
}

#[derive(Default)]
-pub struct Codegen<'a> {
-    pub files: &'a [parser::Ast],
-    pub errors: RefCell<String>,
+pub struct CodegenCtx {
+    pub parser: parser::Ctx,

-    tasks: Vec<Option<FTask>>,
    tys: Types,
-    ci: ItemCtx,
    pool: Pool,
-    #[expect(dead_code)]
-    ralloc: Regalloc,
    ct: Comptime,
}

-impl TypeParser for Codegen<'_> {
-    fn tys(&mut self) -> &mut Types {
-        &mut self.tys
-    }
-
-    fn eval_const(&mut self, file: FileId, expr: &Expr, ret: ty::Id) -> u64 {
-        let mut scope = core::mem::take(&mut self.ci.scope.vars);
-        self.pool.push_ci(file, Some(ret), self.tasks.len(), &mut self.ci);
-        self.ci.scope.vars = scope;
-
-        let prev_err_len = self.errors.borrow().len();
-
-        self.expr(&Expr::Return { pos: expr.pos(), val: Some(expr) });
-
-        scope = core::mem::take(&mut self.ci.scope.vars);
-        self.ci.finalize();
-
-        let res = if self.errors.borrow().len() == prev_err_len {
-            self.emit_and_eval(file, ret, &mut [])
-        } else {
-            1
-        };
-
-        self.pool.pop_ci(&mut self.ci);
-        self.ci.scope.vars = scope;
-
-        res
-    }
-
-    fn infer_type(&mut self, expr: &Expr) -> ty::Id {
-        self.pool.save_ci(&self.ci);
-        let ty = self.expr(expr).map_or(ty::Id::NEVER, |v| v.ty);
-        self.pool.restore_ci(&mut self.ci);
-        ty
-    }
-
-    fn on_reuse(&mut self, existing: ty::Id) {
-        if let ty::Kind::Func(id) = existing.expand()
-            && let func = &mut self.tys.ins.funcs[id as usize]
-            && let Err(idx) = task::unpack(func.offset)
-            && idx < self.tasks.len()
-        {
-            func.offset = task::id(self.tasks.len());
-            let task = self.tasks[idx].take();
-            self.tasks.push(task);
+impl CodegenCtx {
+    pub fn clear(&mut self) {
+        self.parser.clear();
+        self.tys.clear();
+        self.pool.clear();
+        self.ct.clear();
        }
    }

-    fn eval_global(&mut self, file: FileId, name: Ident, expr: &Expr) -> ty::Id {
-        let gid = self.tys.ins.globals.len() as ty::Global;
-        self.tys.ins.globals.push(Global { file, name, ..Default::default() });
-
-        let ty = ty::Kind::Global(gid);
-        self.pool.push_ci(file, None, self.tasks.len(), &mut self.ci);
-        let prev_err_len = self.errors.borrow().len();
-
-        self.expr(&(Expr::Return { pos: expr.pos(), val: Some(expr) }));
-
-        self.ci.finalize();
-
-        let ret = self.ci.ret.expect("for return type to be infered");
-        if self.errors.borrow().len() == prev_err_len {
-            let mut mem = vec![0u8; self.tys.size_of(ret) as usize];
-            self.emit_and_eval(file, ret, &mut mem);
-            self.tys.ins.globals[gid as usize].data = mem;
-        }
-
-        self.pool.pop_ci(&mut self.ci);
-        self.tys.ins.globals[gid as usize].ty = ret;
-
-        ty.compress()
-    }
-
-    fn report(&self, pos: Pos, msg: impl Display) -> ty::Id {
-        self.report(pos, msg);
-        ty::Id::NEVER
-    }
-
-    fn find_local_ty(&mut self, ident: Ident) -> Option<ty::Id> {
-        self.ci.scope.vars.iter().rfind(|v| (v.id == ident && v.value() == NEVER)).map(|v| v.ty)
-    }
+pub struct Codegen<'a> {
+    pub files: &'a [parser::Ast],
+    pub errors: &'a RefCell<String>,
+    tys: &'a mut Types,
+    ci: ItemCtx,
+    pool: &'a mut Pool,
+    ct: &'a mut Comptime,
}

impl<'a> Codegen<'a> {
+    pub fn new(files: &'a [parser::Ast], ctx: &'a mut CodegenCtx) -> Self {
+        Self {
+            files,
+            errors: &ctx.parser.errors,
+            tys: &mut ctx.tys,
+            ci: Default::default(),
+            pool: &mut ctx.pool,
+            ct: &mut ctx.ct,
+        }
+    }
+
    fn emit_and_eval(&mut self, file: FileId, ret: ty::Id, ret_loc: &mut [u8]) -> u64 {
        if !self.complete_call_graph() {
            return 1;
        }

-        self.ci.emit_body(&mut self.tys, self.files, Sig { args: Tuple::empty(), ret });
+        self.ci.emit_body(self.tys, self.files, Sig { args: Tuple::empty(), ret });
        self.ci.code.truncate(self.ci.code.len() - instrs::jala(0, 0, 0).0);
        self.ci.emit(instrs::tx());

@@ -2006,12 +1948,12 @@ impl<'a> Codegen<'a> {
        debug_assert_ne!(region, VOID);
        debug_assert_ne!({ self.ci.nodes[region].ty }, ty::Id::VOID, "{:?}", {
            self.ci.nodes[region].lock_rc = Nid::MAX;
-            self.ci.nodes.graphviz_in_browser(&self.tys, self.files);
+            self.ci.nodes.graphviz_in_browser(self.tys, self.files);
        });
        debug_assert!(
            self.ci.nodes[region].kind != Kind::Load || self.ci.nodes[region].ty.is_pointer(),
            "{:?} {} {}",
-            self.ci.nodes.graphviz_in_browser(&self.tys, self.files),
+            self.ci.nodes.graphviz_in_browser(self.tys, self.files),
            self.cfile().path,
            self.ty_display(self.ci.nodes[region].ty)
        );
@@ -2043,8 +1985,8 @@ impl<'a> Codegen<'a> {
    fn make_func_reachable(&mut self, func: ty::Func) {
        let fuc = &mut self.tys.ins.funcs[func as usize];
        if fuc.offset == u32::MAX {
-            fuc.offset = task::id(self.tasks.len() as _);
-            self.tasks.push(Some(FTask { file: fuc.file, id: func }));
+            fuc.offset = task::id(self.tys.tasks.len() as _);
+            self.tys.tasks.push(Some(FTask { file: fuc.file, id: func }));
        }
    }

@@ -2114,12 +2056,18 @@ impl<'a> Codegen<'a> {
                crate::endoce_string(literal, &mut data, report).unwrap();

                let ty = self.tys.make_ptr(ty::Id::U8);
-                let global = match self.tys.ins.strings.entry(data.clone()) {
-                    hash_map::Entry::Occupied(occupied_entry) => *occupied_entry.get(),
-                    hash_map::Entry::Vacant(vacant_entry) => {
+                let global = match self.tys.strings.entry(&data, &self.tys.ins.globals) {
+                    (hash_map::RawEntryMut::Occupied(occupied_entry), _) => {
+                        occupied_entry.get_key_value().0.value.0
+                    }
+                    (hash_map::RawEntryMut::Vacant(vacant_entry), hash) => {
                        let global = self.tys.ins.globals.len() as ty::Global;
                        self.tys.ins.globals.push(Global { data, ty, ..Default::default() });
-                        *vacant_entry.insert(global)
+                        vacant_entry
+                            .insert(crate::ctx_map::Key { value: StringRef(global), hash }, ())
+                            .0
+                            .value
+                            .0
                    }
                };
                let global = self.ci.nodes.new_node(ty, Kind::Global { global }, [VOID]);
@@ -2204,7 +2152,7 @@ impl<'a> Codegen<'a> {
                    return Value::NEVER;
                };

-                let Some((offset, ty)) = OffsetIter::offset_of(&self.tys, s, name) else {
+                let Some((offset, ty)) = OffsetIter::offset_of(self.tys, s, name) else {
                    let field_list = self
                        .tys
                        .struct_fields(s)
@@ -2267,7 +2215,7 @@ impl<'a> Codegen<'a> {
            }
            Expr::BinOp { left, op: TokenKind::Decl, right, .. } => {
                let mut right = self.expr(right)?;
-                if right.ty.loc(&self.tys) == Loc::Stack {
+                if right.ty.loc(self.tys) == Loc::Stack {
                    let stck = self.ci.nodes.new_node_nop(right.ty, Kind::Stck, [VOID, MEM]);
                    self.store_mem(stck, right.ty, right.id);
                    right.id = stck;
@@ -2424,7 +2372,7 @@ impl<'a> Codegen<'a> {
                );
            }

-            match ty.loc(&self.tys) {
+            match ty.loc(self.tys) {
                Loc::Reg if core::mem::take(&mut val.ptr) => val.id = self.load_mem(val.id, ty),
                Loc::Stack if !val.ptr => {
                    let stack = self.ci.nodes.new_node_nop(ty, Kind::Stck, [VOID, MEM]);
@@ -2504,7 +2452,7 @@ impl<'a> Codegen<'a> {
                let mut has_ptr_arg = false;
                for arg in args {
                    let value = self.expr(arg)?;
-                    has_ptr_arg |= value.ty.has_pointers(&self.tys);
+                    has_ptr_arg |= value.ty.has_pointers(self.tys);
                    self.tys.tmp.args.push(value.ty);
                    debug_assert_ne!(self.ci.nodes[value.id].kind, Kind::Stre);
                    self.ci.nodes.lock(value.id);
@@ -2532,7 +2480,7 @@ impl<'a> Codegen<'a> {
                    });
                }

-                let alt_value = match ty.loc(&self.tys) {
+                let alt_value = match ty.loc(self.tys) {
                    Loc::Reg => None,
                    Loc::Stack => {
                        let stck = self.ci.nodes.new_node_nop(ty, Kind::Stck, [VOID, MEM]);
@ -2550,7 +2498,6 @@ impl<'a> Codegen<'a> {
|
||||||
|
|
||||||
alt_value.or(Some(Value::new(self.ci.ctrl).ty(ty)))
|
alt_value.or(Some(Value::new(self.ci.ctrl).ty(ty)))
|
||||||
}
|
}
|
||||||
//Expr::Directive { name: "inline", args: [func, args @ ..], .. }
|
|
||||||
Expr::Call { func, args, .. } => {
|
Expr::Call { func, args, .. } => {
|
||||||
self.ci.call_count += 1;
|
self.ci.call_count += 1;
|
||||||
let ty = self.ty(func);
|
let ty = self.ty(func);
|
||||||
|
@ -2588,11 +2535,11 @@ impl<'a> Codegen<'a> {
|
||||||
let mut cargs = cargs.iter();
|
let mut cargs = cargs.iter();
|
||||||
let mut args = args.iter();
|
let mut args = args.iter();
|
||||||
let mut has_ptr_arg = false;
|
let mut has_ptr_arg = false;
|
||||||
while let Some(ty) = tys.next(&self.tys) {
|
while let Some(ty) = tys.next(self.tys) {
|
||||||
let carg = cargs.next().unwrap();
|
let carg = cargs.next().unwrap();
|
||||||
let arg = args.next().unwrap();
|
let Some(arg) = args.next() else { break };
|
||||||
let Arg::Value(ty) = ty else { continue };
|
let Arg::Value(ty) = ty else { continue };
|
||||||
has_ptr_arg |= ty.has_pointers(&self.tys);
|
has_ptr_arg |= ty.has_pointers(self.tys);
|
||||||
|
|
||||||
let mut value = self.expr_ctx(arg, Ctx::default().with_ty(ty))?;
|
let mut value = self.expr_ctx(arg, Ctx::default().with_ty(ty))?;
|
||||||
debug_assert_ne!(self.ci.nodes[value.id].kind, Kind::Stre);
|
debug_assert_ne!(self.ci.nodes[value.id].kind, Kind::Stre);
|
||||||
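The `let Some(arg) = args.next() else { break }` change is the "not enough arguments" part of the commit: with too few call arguments the loop now stops instead of unwrapping a `None` and panicking, so the arity mismatch surfaces as a normal diagnostic. A small, self-contained illustration of the same pattern (hypothetical `pair_args` helper, not the compiler's code):

    // Walk parameters and arguments in lockstep without assuming equal length.
    fn pair_args(params: &[&str], args: &[i64]) -> Vec<(String, i64)> {
        let mut out = Vec::new();
        let mut args = args.iter();
        for &param in params {
            // With too few arguments this simply stops; the caller is expected
            // to report the arity mismatch as a proper diagnostic instead of crashing.
            let Some(&arg) = args.next() else { break };
            out.push((param.to_string(), arg));
        }
        out
    }

    fn main() {
        // Two parameters but only one argument: no panic, one pair produced.
        assert_eq!(pair_args(&["a", "b"], &[1]).len(), 1);
    }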
@@ -2621,7 +2568,7 @@ impl<'a> Codegen<'a> {
                     });
                 }

-                let alt_value = match sig.ret.loc(&self.tys) {
+                let alt_value = match sig.ret.loc(self.tys) {
                     Loc::Reg => None,
                     Loc::Stack => {
                         let stck = self.ci.nodes.new_node_nop(sig.ret, Kind::Stck, [VOID, MEM]);
@@ -2681,9 +2628,9 @@ impl<'a> Codegen<'a> {
                 let mut args = args.iter();
                 let mut cargs = cargs.iter();
                 let var_base = self.ci.scope.vars.len();
-                while let Some(aty) = tys.next(&self.tys) {
-                    let arg = args.next().unwrap();
+                while let Some(aty) = tys.next(self.tys) {
                     let carg = cargs.next().unwrap();
+                    let Some(arg) = args.next() else { break };
                     match aty {
                         Arg::Type(id) => {
                             self.ci.scope.vars.push(Variable::new(
@@ -2769,9 +2716,9 @@ impl<'a> Codegen<'a> {
                 match sty.expand() {
                     ty::Kind::Struct(s) => {
                         let mem = self.ci.nodes.new_node(sty, Kind::Stck, [VOID, MEM]);
-                        let mut offs = OffsetIter::new(s, &self.tys);
+                        let mut offs = OffsetIter::new(s, self.tys);
                         for field in fields {
-                            let Some((ty, offset)) = offs.next_ty(&self.tys) else {
+                            let Some((ty, offset)) = offs.next_ty(self.tys) else {
                                 self.report(
                                     field.pos(),
                                     "this init argumen overflows the field count",
@@ -2787,7 +2734,7 @@ impl<'a> Codegen<'a> {
                             }

                             let field_list = offs
-                                .into_iter(&self.tys)
+                                .into_iter(self.tys)
                                 .map(|(f, ..)| self.tys.names.ident_str(f.name))
                                 .intersperse(", ")
                                 .collect::<String>();
@@ -2877,8 +2824,8 @@ impl<'a> Codegen<'a> {
                         };

                         // TODO: dont allocate
-                        let mut offs = OffsetIter::new(s, &self.tys)
-                            .into_iter(&self.tys)
+                        let mut offs = OffsetIter::new(s, self.tys)
+                            .into_iter(self.tys)
                             .map(|(f, o)| (f.ty, o))
                             .collect::<Vec<_>>();
                         let mem = self.ci.nodes.new_node(sty, Kind::Stck, [VOID, MEM]);
@@ -3131,7 +3078,10 @@ impl<'a> Codegen<'a> {

                 Some(Value::VOID)
             }
-            ref e => self.report_unhandled_ast(e, "bruh"),
+            ref e => {
+                self.report_unhandled_ast(e, "bruh");
+                Some(Value::VOID)
+            }
         }
     }

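This is the "undescriptive error" half of the commit: the fallback arm no longer diverges, it records a diagnostic and keeps lowering with a `VOID` placeholder so further errors in the same pass still get reported. A short sketch of that recovery pattern with hypothetical types (not the compiler's own `Codegen`/`Value`):

    use std::cell::RefCell;
    use std::fmt::Write;

    #[derive(Default)]
    struct Diagnostics {
        errors: RefCell<String>,
    }

    impl Diagnostics {
        fn report(&self, pos: usize, msg: &str) {
            // Accumulate instead of aborting; the driver checks the buffer later.
            _ = writeln!(self.errors.borrow_mut(), "error at {pos}: {msg}");
        }
    }

    fn lower_expr(diags: &Diagnostics, expr_kind: &str, pos: usize) -> Option<u32> {
        match expr_kind {
            "number" => Some(0),
            unhandled => {
                diags.report(pos, &format!("compiler does not (yet) know how to handle ({unhandled})"));
                Some(0) // placeholder, mirrors returning `Some(Value::VOID)` and continuing
            }
        }
    }

    fn main() {
        let diags = Diagnostics::default();
        lower_expr(&diags, "closure", 42);
        print!("{}", diags.errors.borrow());
    }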
@@ -3144,8 +3094,8 @@ impl<'a> Codegen<'a> {
         lhs: Nid,
         rhs: Nid,
     ) -> bool {
-        let mut offs = OffsetIter::new(s, &self.tys);
-        while let Some((ty, off)) = offs.next_ty(&self.tys) {
+        let mut offs = OffsetIter::new(s, self.tys);
+        while let Some((ty, off)) = offs.next_ty(self.tys) {
             let lhs = self.offset(lhs, off);
             let rhs = self.offset(rhs, off);
             let dst = self.offset(dst, off);
@@ -3268,7 +3218,7 @@ impl<'a> Codegen<'a> {
         };

         for &CtorField { pos, name, ref value } in fields {
-            let Some((offset, ty)) = OffsetIter::offset_of(&self.tys, idx, name) else {
+            let Some((offset, ty)) = OffsetIter::offset_of(self.tys, idx, name) else {
                 self.report(pos, format_args!("field not found: {name:?}"));
                 continue;
             };
@@ -3387,8 +3337,8 @@ impl<'a> Codegen<'a> {

     fn complete_call_graph(&mut self) -> bool {
         let prev_err_len = self.errors.borrow().len();
-        while self.ci.task_base < self.tasks.len()
-            && let Some(task_slot) = self.tasks.pop()
+        while self.ci.task_base < self.tys.tasks.len()
+            && let Some(task_slot) = self.tys.tasks.pop()
         {
             let Some(task) = task_slot else { continue };
             self.emit_func(task);
@@ -3413,7 +3363,7 @@ impl<'a> Codegen<'a> {

         let mut tys = sig.args.args();
         let mut args = args.iter();
-        while let Some(aty) = tys.next(&self.tys) {
+        while let Some(aty) = tys.next(self.tys) {
             let arg = args.next().unwrap();
             match aty {
                 Arg::Type(ty) => {
@@ -3427,12 +3377,12 @@ impl<'a> Codegen<'a> {
                 }
                 Arg::Value(ty) => {
                     let mut deps = Vc::from([VOID]);
-                    if ty.loc(&self.tys) == Loc::Stack && self.tys.size_of(ty) <= 16 {
+                    if ty.loc(self.tys) == Loc::Stack && self.tys.size_of(ty) <= 16 {
                         deps.push(MEM);
                     }
                     // TODO: whe we not using the deps?
                     let value = self.ci.nodes.new_node_nop(ty, Kind::Arg, deps);
-                    let ptr = ty.loc(&self.tys) == Loc::Stack;
+                    let ptr = ty.loc(self.tys) == Loc::Stack;
                     self.ci.scope.vars.push(Variable::new(
                         arg.id,
                         ty,
@@ -3457,7 +3407,7 @@ impl<'a> Codegen<'a> {
         self.ci.finalize();

         if self.errors.borrow().len() == prev_err_len {
-            self.ci.emit_body(&mut self.tys, self.files, sig);
+            self.ci.emit_body(self.tys, self.files, sig);
             self.tys.ins.funcs[id as usize].code.append(&mut self.ci.code);
             self.tys.ins.funcs[id as usize].relocs.append(&mut self.ci.relocs);
         }
@@ -3470,7 +3420,7 @@ impl<'a> Codegen<'a> {
     }

     fn ty_display(&self, ty: ty::Id) -> ty::Display {
-        ty::Display::new(&self.tys, self.files, ty)
+        ty::Display::new(self.tys, self.files, ty)
     }

     fn ast_display(&self, ast: &'a Expr<'a>) -> parser::Display<'a> {
@@ -3563,18 +3513,96 @@ impl<'a> Codegen<'a> {
     }

     #[track_caller]
-    fn report_unhandled_ast(&self, ast: &Expr, hint: impl Display) -> ! {
+    fn report_unhandled_ast(&self, ast: &Expr, hint: impl Display) {
         log::info!("{ast:#?}");
-        self.fatal_report(ast.pos(), fa!("compiler does not (yet) know how to handle ({hint})"));
+        self.report(ast.pos(), fa!("compiler does not (yet) know how to handle ({hint})"));
     }

     fn cfile(&self) -> &'a parser::Ast {
         &self.files[self.ci.file as usize]
     }
+}

-    fn fatal_report(&self, pos: Pos, msg: impl Display) -> ! {
+impl TypeParser for Codegen<'_> {
+    fn tys(&mut self) -> &mut Types {
+        self.tys
+    }
+
+    fn eval_const(&mut self, file: FileId, expr: &Expr, ret: ty::Id) -> u64 {
+        let mut scope = core::mem::take(&mut self.ci.scope.vars);
+        self.pool.push_ci(file, Some(ret), self.tys.tasks.len(), &mut self.ci);
+        self.ci.scope.vars = scope;
+
+        let prev_err_len = self.errors.borrow().len();
+
+        self.expr(&Expr::Return { pos: expr.pos(), val: Some(expr) });
+
+        scope = core::mem::take(&mut self.ci.scope.vars);
+        self.ci.finalize();
+
+        let res = if self.errors.borrow().len() == prev_err_len {
+            self.emit_and_eval(file, ret, &mut [])
+        } else {
+            1
+        };
+
+        self.pool.pop_ci(&mut self.ci);
+        self.ci.scope.vars = scope;
+
+        res
+    }
+
+    fn infer_type(&mut self, expr: &Expr) -> ty::Id {
+        self.pool.save_ci(&self.ci);
+        let ty = self.expr(expr).map_or(ty::Id::NEVER, |v| v.ty);
+        self.pool.restore_ci(&mut self.ci);
+        ty
+    }
+
+    fn on_reuse(&mut self, existing: ty::Id) {
+        if let ty::Kind::Func(id) = existing.expand()
+            && let func = &mut self.tys.ins.funcs[id as usize]
+            && let Err(idx) = task::unpack(func.offset)
+            && idx < self.tys.tasks.len()
+        {
+            func.offset = task::id(self.tys.tasks.len());
+            let task = self.tys.tasks[idx].take();
+            self.tys.tasks.push(task);
+        }
+    }
+
+    fn eval_global(&mut self, file: FileId, name: Ident, expr: &Expr) -> ty::Id {
+        let gid = self.tys.ins.globals.len() as ty::Global;
+        self.tys.ins.globals.push(Global { file, name, ..Default::default() });
+
+        let ty = ty::Kind::Global(gid);
+        self.pool.push_ci(file, None, self.tys.tasks.len(), &mut self.ci);
+        let prev_err_len = self.errors.borrow().len();
+
+        self.expr(&(Expr::Return { pos: expr.pos(), val: Some(expr) }));
+
+        self.ci.finalize();
+
+        let ret = self.ci.ret.expect("for return type to be infered");
+        if self.errors.borrow().len() == prev_err_len {
+            let mut mem = vec![0u8; self.tys.size_of(ret) as usize];
+            self.emit_and_eval(file, ret, &mut mem);
+            self.tys.ins.globals[gid as usize].data = mem;
+        }
+
+        self.pool.pop_ci(&mut self.ci);
+        self.tys.ins.globals[gid as usize].ty = ret;
+
+        ty.compress()
+    }
+
+    fn report(&self, pos: Pos, msg: impl Display) -> ty::Id {
         self.report(pos, msg);
-        panic!("{}", self.errors.borrow());
+        ty::Id::NEVER
+    }
+
+    fn find_local_ty(&mut self, ident: Ident) -> Option<ty::Id> {
+        self.ci.scope.vars.iter().rfind(|v| (v.id == ident && v.value() == NEVER)).map(|v| v.ty)
     }
 }

@@ -4373,17 +4401,108 @@ fn common_dom(mut a: Nid, mut b: Nid, nodes: &mut Nodes) -> Nid {
 #[cfg(test)]
 mod tests {
     use {
+        super::{Codegen, CodegenCtx},
+        crate::{
+            lexer::TokenKind,
+            parser::{self},
+        },
         alloc::{string::String, vec::Vec},
-        core::fmt::Write,
+        core::{fmt::Write, hash::BuildHasher, ops::Range},
     };

+    #[derive(Default)]
+    struct Rand(pub u64);
+
+    impl Rand {
+        pub fn next(&mut self) -> u64 {
+            self.0 = crate::FnvBuildHasher::default().hash_one(self.0);
+            self.0
+        }
+
+        pub fn range(&mut self, min: u64, max: u64) -> u64 {
+            self.next() % (max - min) + min
+        }
+    }
+
+    #[derive(Default)]
+    struct FuncGen {
+        rand: Rand,
+        buf: String,
+    }
+
+    impl FuncGen {
+        fn gen(&mut self, seed: u64) -> &str {
+            self.rand = Rand(seed);
+            self.buf.clear();
+            self.buf.push_str("main := fn(): void { return ");
+            self.expr().unwrap();
+            self.buf.push('}');
+            &self.buf
+        }
+
+        fn expr(&mut self) -> core::fmt::Result {
+            match self.rand.range(0, 100) {
+                0..80 => {
+                    write!(self.buf, "{}", self.rand.next())
+                }
+                80..100 => {
+                    self.expr()?;
+                    let ops = [
+                        TokenKind::Add,
+                        TokenKind::Sub,
+                        TokenKind::Mul,
+                        TokenKind::Div,
+                        TokenKind::Shl,
+                        TokenKind::Eq,
+                        TokenKind::Ne,
+                        TokenKind::Lt,
+                        TokenKind::Gt,
+                        TokenKind::Le,
+                        TokenKind::Ge,
+                        TokenKind::Band,
+                        TokenKind::Bor,
+                        TokenKind::Xor,
+                        TokenKind::Mod,
+                        TokenKind::Shr,
+                    ];
+                    let op = ops[self.rand.range(0, ops.len() as u64) as usize];
+                    write!(self.buf, " {op} ")?;
+                    self.expr()
+                }
+                _ => unreachable!(),
+            }
+        }
+    }
+
+    fn fuzz(seed_range: Range<u64>) {
+        let mut gen = FuncGen::default();
+        let mut ctx = CodegenCtx::default();
+        for i in seed_range {
+            ctx.clear();
+            let src = gen.gen(i);
+            let parsed = parser::Ast::new("fuzz", src, &mut ctx.parser, &mut parser::no_loader);
+
+            let mut cdg = Codegen::new(core::slice::from_ref(&parsed), &mut ctx);
+            cdg.generate(0);
+        }
+    }
+
+    #[test]
+    #[ignore]
+    fn fuzz_test() {
+        _ = log::set_logger(&crate::fs::Logger);
+        log::set_max_level(log::LevelFilter::Info);
+        fuzz(0..10000);
+    }
+
     fn generate(ident: &'static str, input: &'static str, output: &mut String) {
         _ = log::set_logger(&crate::fs::Logger);
-        // log::set_max_level(log::LevelFilter::Info);
+        log::set_max_level(log::LevelFilter::Info);
         //log::set_max_level(log::LevelFilter::Trace);

-        let (ref files, embeds) = crate::test_parse_files(ident, input);
-        let mut codegen = super::Codegen { files, ..Default::default() };
+        let mut ctx = CodegenCtx::default();
+        let (ref files, embeds) = crate::test_parse_files(ident, input, &mut ctx.parser);
+        let mut codegen = super::Codegen::new(files, &mut ctx);
         codegen.push_embeds(embeds);

         codegen.generate(0);
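The new `fuzz_test` is marked `#[ignore]`, so it only runs when asked for explicitly (with cargo that is `cargo test -- --ignored`), and every generated program is reproducible from its seed because `Rand` derives each state by hashing the previous one. A self-contained sketch of that hash-driven PRNG idea, with FNV-1a written out by hand; the project's `FnvBuildHasher` is assumed to behave along these lines, and this is not its code:

    // FNV-1a over the little-endian bytes of the previous state.
    fn fnv1a_u64(x: u64) -> u64 {
        let mut hash: u64 = 0xcbf29ce484222325; // FNV offset basis
        for byte in x.to_le_bytes() {
            hash ^= byte as u64;
            hash = hash.wrapping_mul(0x100000001b3); // FNV prime
        }
        hash
    }

    struct Rand(u64);

    impl Rand {
        fn next(&mut self) -> u64 {
            self.0 = fnv1a_u64(self.0);
            self.0
        }

        // Value in `min..max`; the modulo bias is irrelevant for fuzzing purposes.
        fn range(&mut self, min: u64, max: u64) -> u64 {
            self.next() % (max - min) + min
        }
    }

    fn main() {
        // Seeded, so a failing fuzz case can be replayed from its seed alone.
        let mut rng = Rand(42);
        println!("{} {} {}", rng.range(0, 100), rng.range(0, 100), rng.range(0, 100));
    }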