fixing arithmetic bug

This commit is contained in:
Jakub Doka 2024-10-13 15:22:16 +02:00
parent 659ccbd637
commit 2660d976fe
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
8 changed files with 148 additions and 54 deletions

View file

@ -20,7 +20,7 @@ unsafe extern "C" fn fmt() {
let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
let mut ctx = parser::ParserCtx::default();
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut |_, _| Ok(0), &arena);
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);
let mut f = wasm_rt::Write(&mut OUTPUT[..]);
fmt::fmt_file(exprs, code, &mut f).unwrap();

View file

@ -55,7 +55,10 @@ unsafe fn compile_and_run(mut fuel: usize) {
let files = {
let mut ctx = hblang::parser::ParserCtx::default();
let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
let mut loader = |path: &str, _: &str| Ok(paths.binary_search(&path).unwrap() as FileId);
let mut loader = |path: &str, _: &str, kind| match kind {
hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap() as FileId),
hblang::parser::FileKind::Embed => Err("embeds are not supported".into()),
};
files
.into_iter()
.map(|f| {

View file

@ -131,6 +131,12 @@ fib := fn(n: int): int {
main := fn(): int {
a := 1
b := &a
boundary := 1000
b = b + boundary - 2
b = b - (boundary - 2)
modify(b)
drop(a)
return *b - 2

View file

@ -727,6 +727,7 @@ mod trap {
#[derive(Default)]
pub struct Codegen {
pub files: Vec<parser::Ast>,
pub embeds: Vec<Vec<u8>>,
tasks: Vec<Option<FTask>>,
tys: Types,
@ -1615,6 +1616,7 @@ impl Codegen {
E::BinOp { left, op, right } if op != T::Decl => 'ops: {
let left = self.expr_ctx(left, Ctx {
ty: ctx.ty.filter(|_| op.is_homogenous()),
check: ctx.check,
..Default::default()
})?;
@ -2743,6 +2745,7 @@ impl Codegen {
} else {
let dty = self.ty_display(ty);
let dexpected = self.ty_display(expected);
log::info!("mode: {:?}", kind);
self.report(pos, format_args!("expected {hint} of type {dexpected}, got {dty}",));
}
}
@ -2814,7 +2817,7 @@ mod tests {
fn generate(ident: &'static str, input: &'static str, output: &mut String) {
_ = log::set_logger(&crate::fs::Logger);
log::set_max_level(log::LevelFilter::Error);
log::set_max_level(log::LevelFilter::Debug);
let mut codegen =
super::Codegen { files: crate::test_parse_files(ident, input), ..Default::default() };

View file

@ -210,6 +210,7 @@ impl<'a> Formatter<'a> {
Expr::String { literal, .. } => f.write_str(literal),
Expr::Comment { literal, .. } => f.write_str(literal),
Expr::Mod { path, .. } => write!(f, "@use(\"{path}\")"),
Expr::Embed { path, .. } => write!(f, "@embed(\"{path}\")"),
Expr::Field { target, name: field, .. } => {
self.fmt_paren(target, f, postfix)?;
f.write_str(".")?;
@ -366,13 +367,20 @@ impl<'a> Formatter<'a> {
self.fmt(right, f)
}
Expr::BinOp { right, op, left } => {
let pec_miss = |e: &Expr| {
let prec_miss_left = |e: &Expr| {
matches!(
e, Expr::BinOp { op: lop, .. } if op.precedence() > lop.precedence()
)
};
let prec_miss_right = |e: &Expr| {
matches!(
e, Expr::BinOp { op: lop, .. }
if (op.precedence() == lop.precedence() && !op.is_comutative())
|| op.precedence() > lop.precedence()
)
};
self.fmt_paren(left, f, pec_miss)?;
self.fmt_paren(left, f, prec_miss_left)?;
if let Some(mut prev) = self.source.get(..right.pos() as usize) {
prev = prev.trim_end();
let estimate_bound =
@ -396,7 +404,7 @@ impl<'a> Formatter<'a> {
f.write_str(op.name())?;
f.write_str(" ")?;
}
self.fmt_paren(right, f, pec_miss)
self.fmt_paren(right, f, prec_miss_right)
}
}
}
@ -452,7 +460,8 @@ pub mod test {
let len = crate::fmt::minify(&mut minned);
minned.truncate(len);
let ast = parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut |_, _| Ok(0));
let ast =
parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut parser::no_loader);
//log::error!(
// "{} / {} = {} | {} / {} = {}",
// ast.mem.size(),

View file

@ -1,7 +1,7 @@
use {
crate::{
codegen,
parser::{self, Ast, ParserCtx},
parser::{self, Ast, FileKind, ParserCtx},
},
alloc::{string::String, vec::Vec},
core::{fmt::Write, num::NonZeroUsize},
@ -10,7 +10,9 @@ use {
collections::VecDeque,
eprintln,
ffi::OsStr,
io::{self, Write as _},
io::{
Write as _, {self},
},
path::{Path, PathBuf},
string::ToString,
sync::Mutex,
@ -79,15 +81,16 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
}
if options.fmt {
for parsed in parsed {
for parsed in parsed.ast {
format_ast(parsed)?;
}
} else if options.fmt_stdout {
let ast = parsed.into_iter().next().unwrap();
let ast = parsed.ast.into_iter().next().unwrap();
write!(out, "{ast}").unwrap();
} else {
let mut codegen = codegen::Codegen::default();
codegen.files = parsed;
codegen.files = parsed.ast;
codegen.embeds = parsed.embeds;
codegen.generate(0);
if options.dump_asm {
@ -187,7 +190,12 @@ impl<T> TaskQueueInner<T> {
}
}
pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
/// Output of `parse_from_fs`: the parsed module ASTs together with the raw
/// bytes of every `@embed`-ed file. Both vectors are indexed by the ids the
/// loader handed out while parsing (embeds are written into their slot by id,
/// resizing the vector as needed).
pub struct Loaded {
ast: Vec<Ast>,
embeds: Vec<Vec<u8>>,
}
pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
fn resolve(path: &str, from: &str, tmp: &mut PathBuf) -> Result<PathBuf, CantLoadFile> {
tmp.clear();
match Path::new(from).parent() {
@ -224,44 +232,73 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
type Task = (u32, PathBuf);
let seen = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
let seen_modules = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
let seen_embeds = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
let tasks = TaskQueue::<Task>::new(extra_threads + 1);
let ast = Mutex::new(Vec::<io::Result<Ast>>::new());
let embeds = Mutex::new(Vec::<Vec<u8>>::new());
let loader = |path: &str, from: &str, tmp: &mut _| {
if path.starts_with("rel:") {
return Err(io::Error::new(
io::ErrorKind::Other,
"`rel:` prefix was removed and is now equivalent to no prefix (remove it)"
.to_string(),
));
}
let loader = |path: &str, from: &str, kind: FileKind, tmp: &mut _| {
let mut physiscal_path = resolve(path, from, tmp)?;
let id = {
let mut seen = seen.lock().unwrap();
let len = seen.len();
match seen.entry(physiscal_path) {
hash_map::Entry::Occupied(entry) => {
return Ok(*entry.get());
}
hash_map::Entry::Vacant(entry) => {
physiscal_path = entry.insert_entry(len as _).key().clone();
len as u32
match kind {
FileKind::Module => {
let id = {
let mut seen = seen_modules.lock().unwrap();
let len = seen.len();
match seen.entry(physiscal_path) {
hash_map::Entry::Occupied(entry) => {
return Ok(*entry.get());
}
hash_map::Entry::Vacant(entry) => {
physiscal_path = entry.insert_entry(len as _).key().clone();
len as u32
}
}
};
if !physiscal_path.exists() {
return Err(io::Error::new(
io::ErrorKind::NotFound,
format!("can't find file: {}", display_rel_path(&physiscal_path)),
));
}
tasks.push((id, physiscal_path));
Ok(id)
}
};
FileKind::Embed => {
let id = {
let mut seen = seen_embeds.lock().unwrap();
let len = seen.len();
match seen.entry(physiscal_path) {
hash_map::Entry::Occupied(entry) => {
return Ok(*entry.get());
}
hash_map::Entry::Vacant(entry) => {
physiscal_path = entry.insert_entry(len as _).key().clone();
len as u32
}
}
};
if !physiscal_path.exists() {
return Err(io::Error::new(
io::ErrorKind::NotFound,
format!("can't find file: {}", display_rel_path(&physiscal_path)),
));
let content = std::fs::read(&physiscal_path).map_err(|e| {
io::Error::new(
e.kind(),
format!(
"can't load embed file: {}: {e}",
display_rel_path(&physiscal_path)
),
)
})?;
let mut embeds = embeds.lock().unwrap();
if id as usize >= embeds.len() {
embeds.resize(id as usize + 1, Default::default());
}
embeds[id as usize] = content;
Ok(id)
}
}
tasks.push((id, physiscal_path));
Ok(id)
};
let execute_task = |ctx: &mut _, (_, path): Task, tmp: &mut _| {
@ -271,8 +308,8 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
format!("path contains invalid characters: {}", display_rel_path(&path)),
)
})?;
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from| {
loader(path, from, tmp).map_err(|e| e.to_string())
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
loader(path, from, kind, tmp).map_err(|e| e.to_string())
}))
};
@ -291,7 +328,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
let path = Path::new(root).canonicalize().map_err(|e| {
io::Error::new(e.kind(), format!("can't canonicalize root file path ({root})"))
})?;
seen.lock().unwrap().insert(path.clone(), 0);
seen_modules.lock().unwrap().insert(path.clone(), 0);
tasks.push((0, path));
if extra_threads == 0 {
@ -300,7 +337,10 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
std::thread::scope(|s| (0..extra_threads + 1).for_each(|_| _ = s.spawn(thread)));
}
ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()
Ok(Loaded {
ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
embeds: embeds.into_inner().unwrap(),
})
}
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {

View file

@ -249,7 +249,7 @@ mod ty {
lexer::TokenKind,
parser::{self, Pos},
},
core::{default, num::NonZeroU32, ops::Range},
core::{num::NonZeroU32, ops::Range},
};
pub type ArrayLen = u32;
@ -384,7 +384,7 @@ mod ty {
}
}
#[derive(PartialEq, Eq, Default, Debug)]
#[derive(PartialEq, Eq, Default, Debug, Clone, Copy)]
pub enum TyCheck {
BinOp,
#[default]
@ -1345,7 +1345,10 @@ pub fn run_test(
#[cfg(test)]
fn test_parse_files(ident: &'static str, input: &'static str) -> Vec<parser::Ast> {
use std::{borrow::ToOwned, string::ToString};
use {
self::parser::FileKind,
std::{borrow::ToOwned, string::ToString},
};
fn find_block(mut input: &'static str, test_name: &'static str) -> &'static str {
const CASE_PREFIX: &str = "#### ";
@ -1385,7 +1388,8 @@ fn test_parse_files(ident: &'static str, input: &'static str) -> Vec<parser::Ast
fmt::test::format(ident, input[last_start..].trim());
module_map.push((last_module_name, input[last_start..].trim()));
let mut loader = |path: &str, _: &str| {
let mut loader = |path: &str, _: &str, kind| {
assert_eq!(kind, FileKind::Module);
module_map
.iter()
.position(|&(name, _)| name == path)

View file

@ -23,7 +23,13 @@ pub type Symbols = Vec<Symbol>;
pub type FileId = u32;
pub type IdentIndex = u16;
pub type LoaderError = String;
pub type Loader<'a> = &'a mut (dyn FnMut(&str, &str) -> Result<FileId, LoaderError> + 'a);
pub type Loader<'a> = &'a mut (dyn FnMut(&str, &str, FileKind) -> Result<FileId, LoaderError> + 'a);
/// How an `@`-directive wants a file loaded: `Module` corresponds to
/// `@use("...")` (parsed as source), `Embed` to `@embed("...")` (raw bytes).
///
/// Derives `Clone, Copy` since this is a fieldless discriminant handed to
/// loader callbacks by value (consistent with `TyCheck` in this codebase,
/// which carries the same derive set).
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum FileKind {
    Module,
    Embed,
}
pub const SOURCE_TO_AST_FACTOR: usize = 7 * (core::mem::size_of::<usize>() / 4) + 1;
@ -44,8 +50,8 @@ pub mod idfl {
}
}
pub fn no_loader(_: &str, _: &str) -> Result<FileId, LoaderError> {
Err(String::new())
/// Loader stub for contexts that never resolve dependencies (e.g. the wasm
/// `fmt` entry point and the fmt tests): ignores path, origin, and kind and
/// always yields file id 0.
/// NOTE(review): this commit changed the stub from `Err(String::new())` to
/// `Ok(0)`, so `@use`/`@embed` in formatted sources now silently resolve to
/// id 0 instead of reporting a load error — confirm that is intended.
pub fn no_loader(_: &str, _: &str, _: FileKind) -> Result<FileId, LoaderError> {
Ok(0)
}
#[derive(Debug)]
@ -276,7 +282,7 @@ impl<'a, 'b> Parser<'a, 'b> {
E::Mod {
pos,
path,
id: match (self.loader)(path, self.path) {
id: match (self.loader)(path, self.path, FileKind::Module) {
Ok(id) => id,
Err(e) => {
self.report(str.start, format_args!("error loading dependency: {e:#}"))
@ -284,6 +290,23 @@ impl<'a, 'b> Parser<'a, 'b> {
},
}
}
T::Directive if self.lexer.slice(token.range()) == "embed" => {
self.expect_advance(TokenKind::LParen);
let str = self.expect_advance(TokenKind::DQuote);
self.expect_advance(TokenKind::RParen);
let path = self.lexer.slice(str.range());
let path = &path[1..path.len() - 1];
E::Embed {
pos,
path,
id: match (self.loader)(path, self.path, FileKind::Embed) {
Ok(id) => id,
Err(e) => self
.report(str.start, format_args!("error loading embedded file: {e:#}")),
},
}
}
T::Directive => E::Directive {
pos: pos - 1, // need to undo the directive shift
name: self.tok_str(token),
@ -816,6 +839,12 @@ generate_expr! {
id: FileId,
path: &'a str,
},
/// `'@embed' '(' String ')'`
Embed {
pos: Pos,
id: FileId,
path: &'a str,
},
}
}