forked from AbleOS/holey-bytes

fixing arithmetic bug

parent 659ccbd637
commit 2660d976fe
@@ -20,7 +20,7 @@ unsafe extern "C" fn fmt() {

     let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
     let mut ctx = parser::ParserCtx::default();
-    let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut |_, _| Ok(0), &arena);
+    let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);

     let mut f = wasm_rt::Write(&mut OUTPUT[..]);
     fmt::fmt_file(exprs, code, &mut f).unwrap();
@@ -55,7 +55,10 @@ unsafe fn compile_and_run(mut fuel: usize) {
     let files = {
         let mut ctx = hblang::parser::ParserCtx::default();
         let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
-        let mut loader = |path: &str, _: &str| Ok(paths.binary_search(&path).unwrap() as FileId);
+        let mut loader = |path: &str, _: &str, kind| match kind {
+            hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap() as FileId),
+            hblang::parser::FileKind::Embed => Err("embeds are not supported".into()),
+        };
         files
             .into_iter()
             .map(|f| {
@@ -131,6 +131,12 @@ fib := fn(n: int): int {
 main := fn(): int {
     a := 1
     b := &a
+
+    boundary := 1000
+
+    b = b + boundary - 2
+    b = b - (boundary - 2)
+
     modify(b)
     drop(a)
     return *b - 2
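
The hunk above looks like the regression test for the arithmetic fix: `b` is moved `boundary - 2` slots away from `a` and then moved back by the same amount, so dereferencing `b` must still read `a`, and the parentheses in `b - (boundary - 2)` have to survive formatting. A rough Rust analogue of that round trip (illustration only, not part of the commit):

fn main() {
    let a: i64 = 1;
    let b: *const i64 = &a;
    let boundary: isize = 1000;

    // `b + boundary - 2` followed by `b - (boundary - 2)` must land back on &a;
    // without the parentheses the second step would compute `(b - boundary) - 2`.
    let moved = b.wrapping_offset(boundary - 2);
    let back = moved.wrapping_offset(-(boundary - 2));

    assert_eq!(back, b);
    assert_eq!(unsafe { *back }, 1);
}
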
@@ -727,6 +727,7 @@ mod trap {
 #[derive(Default)]
 pub struct Codegen {
     pub files: Vec<parser::Ast>,
+    pub embeds: Vec<Vec<u8>>,
     tasks: Vec<Option<FTask>>,

     tys: Types,
@@ -1615,6 +1616,7 @@ impl Codegen {
             E::BinOp { left, op, right } if op != T::Decl => 'ops: {
                 let left = self.expr_ctx(left, Ctx {
                     ty: ctx.ty.filter(|_| op.is_homogenous()),
+                    check: ctx.check,
                     ..Default::default()
                 })?;

@@ -2743,6 +2745,7 @@ impl Codegen {
         } else {
             let dty = self.ty_display(ty);
             let dexpected = self.ty_display(expected);
+            log::info!("mode: {:?}", kind);
             self.report(pos, format_args!("expected {hint} of type {dexpected}, got {dty}",));
         }
     }
@@ -2814,7 +2817,7 @@ mod tests {

     fn generate(ident: &'static str, input: &'static str, output: &mut String) {
         _ = log::set_logger(&crate::fs::Logger);
-        log::set_max_level(log::LevelFilter::Error);
+        log::set_max_level(log::LevelFilter::Debug);

         let mut codegen =
             super::Codegen { files: crate::test_parse_files(ident, input), ..Default::default() };
@@ -210,6 +210,7 @@ impl<'a> Formatter<'a> {
             Expr::String { literal, .. } => f.write_str(literal),
             Expr::Comment { literal, .. } => f.write_str(literal),
             Expr::Mod { path, .. } => write!(f, "@use(\"{path}\")"),
+            Expr::Embed { path, .. } => write!(f, "@embed(\"{path}\")"),
             Expr::Field { target, name: field, .. } => {
                 self.fmt_paren(target, f, postfix)?;
                 f.write_str(".")?;
@@ -366,13 +367,20 @@ impl<'a> Formatter<'a> {
                 self.fmt(right, f)
             }
             Expr::BinOp { right, op, left } => {
-                let pec_miss = |e: &Expr| {
+                let prec_miss_left = |e: &Expr| {
                     matches!(
                         e, Expr::BinOp { op: lop, .. } if op.precedence() > lop.precedence()
                     )
                 };
+                let prec_miss_right = |e: &Expr| {
+                    matches!(
+                        e, Expr::BinOp { op: lop, .. }
+                        if (op.precedence() == lop.precedence() && !op.is_comutative())
+                            || op.precedence() > lop.precedence()
+                    )
+                };

-                self.fmt_paren(left, f, pec_miss)?;
+                self.fmt_paren(left, f, prec_miss_left)?;
                 if let Some(mut prev) = self.source.get(..right.pos() as usize) {
                     prev = prev.trim_end();
                     let estimate_bound =
@@ -396,7 +404,7 @@ impl<'a> Formatter<'a> {
                     f.write_str(op.name())?;
                     f.write_str(" ")?;
                 }
-                self.fmt_paren(right, f, pec_miss)
+                self.fmt_paren(right, f, prec_miss_right)
             }
         }
     }
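
The split into `prec_miss_left` and `prec_miss_right` appears to be the formatter side of the arithmetic fix: on the right-hand operand, parentheses around a nested operator of equal precedence may only be dropped when the operator is commutative. A minimal Rust illustration of the asymmetry (not from the commit):

fn main() {
    let (a, b, c) = (10, 5, 2);

    // Left operand: `(a - b) - c` is what `a - b - c` already means,
    // so equal-precedence parentheses on the left can be flattened.
    assert_eq!((a - b) - c, a - b - c);

    // Right operand: subtraction is left-associative and not commutative,
    // so dropping the parentheses silently changes the value.
    assert_eq!(a - (b - c), 7);
    assert_eq!(a - b - c, 3);
}
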
@@ -452,7 +460,8 @@ pub mod test {
         let len = crate::fmt::minify(&mut minned);
         minned.truncate(len);

-        let ast = parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut |_, _| Ok(0));
+        let ast =
+            parser::Ast::new(ident, minned, &mut ParserCtx::default(), &mut parser::no_loader);
         //log::error!(
         //    "{} / {} = {} | {} / {} = {}",
         //    ast.mem.size(),
lang/src/fs.rs (118 lines changed)
@@ -1,7 +1,7 @@
 use {
     crate::{
         codegen,
-        parser::{self, Ast, ParserCtx},
+        parser::{self, Ast, FileKind, ParserCtx},
     },
     alloc::{string::String, vec::Vec},
     core::{fmt::Write, num::NonZeroUsize},
@@ -10,7 +10,9 @@ use {
         collections::VecDeque,
         eprintln,
         ffi::OsStr,
-        io::{self, Write as _},
+        io::{
+            Write as _, {self},
+        },
         path::{Path, PathBuf},
         string::ToString,
         sync::Mutex,
@@ -79,15 +81,16 @@ pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std
     }

     if options.fmt {
-        for parsed in parsed {
+        for parsed in parsed.ast {
             format_ast(parsed)?;
         }
     } else if options.fmt_stdout {
-        let ast = parsed.into_iter().next().unwrap();
+        let ast = parsed.ast.into_iter().next().unwrap();
         write!(out, "{ast}").unwrap();
     } else {
         let mut codegen = codegen::Codegen::default();
-        codegen.files = parsed;
+        codegen.files = parsed.ast;
+        codegen.embeds = parsed.embeds;

         codegen.generate(0);
         if options.dump_asm {
@@ -187,7 +190,12 @@ impl<T> TaskQueueInner<T> {
     }
 }

-pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
+pub struct Loaded {
+    ast: Vec<Ast>,
+    embeds: Vec<Vec<u8>>,
+}
+
+pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
     fn resolve(path: &str, from: &str, tmp: &mut PathBuf) -> Result<PathBuf, CantLoadFile> {
         tmp.clear();
         match Path::new(from).parent() {
@@ -224,44 +232,73 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {

     type Task = (u32, PathBuf);

-    let seen = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
+    let seen_modules = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
+    let seen_embeds = Mutex::new(crate::HashMap::<PathBuf, u32>::default());
     let tasks = TaskQueue::<Task>::new(extra_threads + 1);
     let ast = Mutex::new(Vec::<io::Result<Ast>>::new());
+    let embeds = Mutex::new(Vec::<Vec<u8>>::new());

-    let loader = |path: &str, from: &str, tmp: &mut _| {
-        if path.starts_with("rel:") {
-            return Err(io::Error::new(
-                io::ErrorKind::Other,
-                "`rel:` prefix was removed and is now equivalent to no prefix (remove it)"
-                    .to_string(),
-            ));
-        }
-
+    let loader = |path: &str, from: &str, kind: FileKind, tmp: &mut _| {
         let mut physiscal_path = resolve(path, from, tmp)?;

-        let id = {
-            let mut seen = seen.lock().unwrap();
-            let len = seen.len();
-            match seen.entry(physiscal_path) {
-                hash_map::Entry::Occupied(entry) => {
-                    return Ok(*entry.get());
-                }
-                hash_map::Entry::Vacant(entry) => {
-                    physiscal_path = entry.insert_entry(len as _).key().clone();
-                    len as u32
+        match kind {
+            FileKind::Module => {
+                let id = {
+                    let mut seen = seen_modules.lock().unwrap();
+                    let len = seen.len();
+                    match seen.entry(physiscal_path) {
+                        hash_map::Entry::Occupied(entry) => {
+                            return Ok(*entry.get());
+                        }
+                        hash_map::Entry::Vacant(entry) => {
+                            physiscal_path = entry.insert_entry(len as _).key().clone();
+                            len as u32
+                        }
+                    }
+                };
+
+                if !physiscal_path.exists() {
+                    return Err(io::Error::new(
+                        io::ErrorKind::NotFound,
+                        format!("can't find file: {}", display_rel_path(&physiscal_path)),
+                    ));
                 }
+
+                tasks.push((id, physiscal_path));
+                Ok(id)
             }
-        };
+            FileKind::Embed => {
+                let id = {
+                    let mut seen = seen_embeds.lock().unwrap();
+                    let len = seen.len();
+                    match seen.entry(physiscal_path) {
+                        hash_map::Entry::Occupied(entry) => {
+                            return Ok(*entry.get());
+                        }
+                        hash_map::Entry::Vacant(entry) => {
+                            physiscal_path = entry.insert_entry(len as _).key().clone();
+                            len as u32
+                        }
+                    }
+                };

-        if !physiscal_path.exists() {
-            return Err(io::Error::new(
-                io::ErrorKind::NotFound,
-                format!("can't find file: {}", display_rel_path(&physiscal_path)),
-            ));
+                let content = std::fs::read(&physiscal_path).map_err(|e| {
+                    io::Error::new(
+                        e.kind(),
+                        format!(
+                            "can't load embed file: {}: {e}",
+                            display_rel_path(&physiscal_path)
+                        ),
+                    )
+                })?;
+                let mut embeds = embeds.lock().unwrap();
+                if id as usize >= embeds.len() {
+                    embeds.resize(id as usize + 1, Default::default());
+                }
+                embeds[id as usize] = content;
+                Ok(id)
+            }
         }
-
-        tasks.push((id, physiscal_path));
-        Ok(id)
     };

     let execute_task = |ctx: &mut _, (_, path): Task, tmp: &mut _| {
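
Both `seen_modules` and `seen_embeds` above rely on the same interning pattern: the first lookup of a path claims the next sequential id, and every later lookup returns that id unchanged. A self-contained sketch of the pattern (illustration only; the names are made up, and the real code keys on `PathBuf` and uses `insert_entry` to also recover the interned key):

use std::collections::{hash_map::Entry, HashMap};

// First sighting of a path assigns the next id; repeats return the old id.
fn intern(seen: &mut HashMap<String, u32>, path: &str) -> u32 {
    let next = seen.len() as u32;
    match seen.entry(path.to_owned()) {
        Entry::Occupied(e) => *e.get(),
        Entry::Vacant(e) => *e.insert(next),
    }
}

fn main() {
    let mut seen = HashMap::new();
    assert_eq!(intern(&mut seen, "main.hb"), 0);
    assert_eq!(intern(&mut seen, "lib.hb"), 1);
    assert_eq!(intern(&mut seen, "main.hb"), 0); // deduplicated
}
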
@@ -271,8 +308,8 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
                 format!("path contains invalid characters: {}", display_rel_path(&path)),
             )
         })?;
-        Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from| {
-            loader(path, from, tmp).map_err(|e| e.to_string())
+        Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
+            loader(path, from, kind, tmp).map_err(|e| e.to_string())
         }))
     };

@@ -291,7 +328,7 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
     let path = Path::new(root).canonicalize().map_err(|e| {
         io::Error::new(e.kind(), format!("can't canonicalize root file path ({root})"))
     })?;
-    seen.lock().unwrap().insert(path.clone(), 0);
+    seen_modules.lock().unwrap().insert(path.clone(), 0);
     tasks.push((0, path));

     if extra_threads == 0 {
@@ -300,7 +337,10 @@ pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Vec<Ast>> {
         std::thread::scope(|s| (0..extra_threads + 1).for_each(|_| _ = s.spawn(thread)));
     }

-    ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()
+    Ok(Loaded {
+        ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
+        embeds: embeds.into_inner().unwrap(),
+    })
 }

 pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
@@ -249,7 +249,7 @@ mod ty {
         lexer::TokenKind,
         parser::{self, Pos},
     },
-    core::{default, num::NonZeroU32, ops::Range},
+    core::{num::NonZeroU32, ops::Range},
 };

 pub type ArrayLen = u32;
@@ -384,7 +384,7 @@ mod ty {
     }
 }

-#[derive(PartialEq, Eq, Default, Debug)]
+#[derive(PartialEq, Eq, Default, Debug, Clone, Copy)]
 pub enum TyCheck {
     BinOp,
     #[default]
@@ -1345,7 +1345,10 @@ pub fn run_test(

 #[cfg(test)]
 fn test_parse_files(ident: &'static str, input: &'static str) -> Vec<parser::Ast> {
-    use std::{borrow::ToOwned, string::ToString};
+    use {
+        self::parser::FileKind,
+        std::{borrow::ToOwned, string::ToString},
+    };

     fn find_block(mut input: &'static str, test_name: &'static str) -> &'static str {
         const CASE_PREFIX: &str = "#### ";
@@ -1385,7 +1388,8 @@ fn test_parse_files(ident: &'static str, input: &'static str) -> Vec<parser::Ast
     fmt::test::format(ident, input[last_start..].trim());
     module_map.push((last_module_name, input[last_start..].trim()));

-    let mut loader = |path: &str, _: &str| {
+    let mut loader = |path: &str, _: &str, kind| {
+        assert_eq!(kind, FileKind::Module);
         module_map
             .iter()
             .position(|&(name, _)| name == path)
@@ -23,7 +23,13 @@ pub type Symbols = Vec<Symbol>;
 pub type FileId = u32;
 pub type IdentIndex = u16;
 pub type LoaderError = String;
-pub type Loader<'a> = &'a mut (dyn FnMut(&str, &str) -> Result<FileId, LoaderError> + 'a);
+pub type Loader<'a> = &'a mut (dyn FnMut(&str, &str, FileKind) -> Result<FileId, LoaderError> + 'a);

+#[derive(PartialEq, Eq, Debug)]
+pub enum FileKind {
+    Module,
+    Embed,
+}
+
 pub const SOURCE_TO_AST_FACTOR: usize = 7 * (core::mem::size_of::<usize>() / 4) + 1;

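
Every `Loader` implementation now has to accept the extra `FileKind` argument, even if it only ever serves modules. A minimal sketch of a loader over a fixed module list (illustration only; the helper and its names are made up, the types are the ones declared above):

// Resolves modules against a fixed path list and rejects embeds.
fn make_loader<'a>(
    paths: &'a [&'a str],
) -> impl FnMut(&str, &str, FileKind) -> Result<FileId, LoaderError> + 'a {
    move |path, _from, kind| match kind {
        FileKind::Module => paths
            .iter()
            .position(|&p| p == path)
            .map(|i| i as FileId)
            .ok_or_else(|| format!("unknown module: {path}")),
        FileKind::Embed => Err("embeds are not supported here".into()),
    }
}

A `&mut` borrow of the returned closure can then be passed wherever a `Loader` is expected.
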
@@ -44,8 +50,8 @@ pub mod idfl {
     }
 }

-pub fn no_loader(_: &str, _: &str) -> Result<FileId, LoaderError> {
-    Err(String::new())
+pub fn no_loader(_: &str, _: &str, _: FileKind) -> Result<FileId, LoaderError> {
+    Ok(0)
 }

 #[derive(Debug)]
@@ -276,7 +282,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                E::Mod {
                    pos,
                    path,
-                   id: match (self.loader)(path, self.path) {
+                   id: match (self.loader)(path, self.path, FileKind::Module) {
                        Ok(id) => id,
                        Err(e) => {
                            self.report(str.start, format_args!("error loading dependency: {e:#}"))
@@ -284,6 +290,23 @@ impl<'a, 'b> Parser<'a, 'b> {
                    },
                }
            }
+            T::Directive if self.lexer.slice(token.range()) == "embed" => {
+                self.expect_advance(TokenKind::LParen);
+                let str = self.expect_advance(TokenKind::DQuote);
+                self.expect_advance(TokenKind::RParen);
+                let path = self.lexer.slice(str.range());
+                let path = &path[1..path.len() - 1];
+
+                E::Embed {
+                    pos,
+                    path,
+                    id: match (self.loader)(path, self.path, FileKind::Embed) {
+                        Ok(id) => id,
+                        Err(e) => self
+                            .report(str.start, format_args!("error loading embedded file: {e:#}")),
+                    },
+                }
+            }
            T::Directive => E::Directive {
                pos: pos - 1, // need to undo the directive shift
                name: self.tok_str(token),
@@ -816,6 +839,12 @@ generate_expr! {
             id: FileId,
             path: &'a str,
         },
+        /// `'@use' '(' String ')'`
+        Embed {
+            pos: Pos,
+            id: FileId,
+            path: &'a str,
+        },
     }
 }
