1
1
Fork 0
mirror of https://github.com/azur1s/bobbylisp.git synced 2024-10-16 02:37:40 -05:00

Compare commits

...

2 commits

Author SHA1 Message Date
Natapat Samutpong a8aa4569c3 Update ex.hyc 2022-03-06 22:15:07 +07:00
Natapat Samutpong 87b3c7717b massive amount of refactor
- parser and lexer now have span
- split into mini-crates
2022-03-06 22:04:48 +07:00
18 changed files with 488 additions and 611 deletions

38
Cargo.lock generated
View file

@ -11,6 +11,15 @@ dependencies = [
"const-random", "const-random",
] ]
[[package]]
name = "ariadne"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1cb2a2046bea8ce5e875551f5772024882de0b540c7f93dfc5d6cf1ca8b030c"
dependencies = [
"yansi",
]
[[package]] [[package]]
name = "atty" name = "atty"
version = "0.2.14" version = "0.2.14"
@ -109,9 +118,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.2.4" version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
@ -143,8 +152,9 @@ dependencies = [
name = "hycron" name = "hycron"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"chumsky",
"clap", "clap",
"lexer",
"parser",
] ]
[[package]] [[package]]
@ -163,6 +173,13 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lexer"
version = "0.1.0"
dependencies = [
"chumsky",
]
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.117" version = "0.2.117"
@ -184,6 +201,15 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "parser"
version = "0.1.0"
dependencies = [
"ariadne",
"chumsky",
"lexer",
]
[[package]] [[package]]
name = "proc-macro-error" name = "proc-macro-error"
version = "1.0.4" version = "1.0.4"
@ -321,3 +347,9 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0" version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yansi"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71"

View file

@ -1,10 +1,6 @@
[package] [workspace]
name = "hycron" members = [
version = "0.1.0" "crates/main",
edition = "2021" "crates/lexer",
"crates/parser",
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html ]
[dependencies]
clap = { version = "3.0.14", features = ["derive"] }
chumsky = "0.8.0"

9
crates/lexer/Cargo.toml Normal file
View file

@ -0,0 +1,9 @@
[package]
name = "lexer"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
chumsky = "0.8.0"

132
crates/lexer/src/lib.rs Normal file
View file

@ -0,0 +1,132 @@
use chumsky::prelude::*;
/// All lexical tokens produced by the lexer.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Token {
    // Keywords
    KwLet, KwFun,
    KwDo, KwEnd,
    KwIf, KwThen, KwElse,
    // Literals. `Int` is converted eagerly to i64; `Float` keeps its source
    // text (the parser converts it to f64 later).
    Int(i64), Float(String), Boolean(bool),
    String(String), Identifier(String),
    // Operators
    Plus, Minus, Multiply, Divide,
    Not, Equal, NotEqual, Less, Greater,
    // Symbols & Delimiters
    Assign,
    Dot, Comma,
    Colon, SemiColon,
    OpenParen, CloseParen,
}
impl std::fmt::Display for Token {
    /// Renders the token exactly as it would appear in source text.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Resolve the token to its textual form first, then emit it once.
        let text: String = match self {
            Token::KwLet => "let".into(),
            Token::KwFun => "fun".into(),
            Token::KwDo => "do".into(),
            Token::KwEnd => "end".into(),
            Token::KwIf => "if".into(),
            Token::KwThen => "then".into(),
            Token::KwElse => "else".into(),
            Token::Int(i) => i.to_string(),
            Token::Float(s) => s.clone(),
            Token::Boolean(b) => b.to_string(),
            Token::String(s) => s.clone(),
            Token::Identifier(s) => s.clone(),
            Token::Plus => "+".into(),
            Token::Minus => "-".into(),
            Token::Multiply => "*".into(),
            Token::Divide => "/".into(),
            Token::Not => "!".into(),
            Token::Equal => "==".into(),
            Token::NotEqual => "!=".into(),
            Token::Less => "<".into(),
            Token::Greater => ">".into(),
            Token::Assign => "=".into(),
            Token::Dot => ".".into(),
            Token::Comma => ",".into(),
            Token::Colon => ":".into(),
            Token::SemiColon => ";".into(),
            Token::OpenParen => "(".into(),
            Token::CloseParen => ")".into(),
        };
        write!(f, "{}", text)
    }
}
/// Byte range of a token in the source text.
pub type Span = std::ops::Range<usize>;

/// Builds the lexer: turns a character stream into `(Token, Span)` pairs.
///
/// `--` starts a line comment (consumed through the terminating newline);
/// other whitespace is skipped. Lexing recovers from bad characters via
/// `skip_then_retry_until`, so errors do not abort the token stream.
pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = Simple<char>> {
    // Integer literal, e.g. `42`.
    let int = text::int(10)
        .map(|s: String| Token::Int(s.parse().unwrap()));
    // Float literal, e.g. `1.5`. Kept as its source string; the parser
    // converts it to f64.
    let float = text::int(10)
        .chain(just('.'))
        .chain::<char, _, _>(text::digits(10))
        .collect::<String>()
        .map(Token::Float);
    // String literal `"…"` (no escape sequences supported).
    let string = just('"')
        .ignore_then(filter(|c| *c != '"').repeated())
        .then_ignore(just('"'))
        .collect::<String>()
        .map(Token::String);
    // Operators and punctuation. `choice` tries alternatives in order, so
    // multi-character operators MUST precede their single-character
    // prefixes (`==` before `=`, `!=` before `!`).
    let symbol = choice((
        just("==").to(Token::Equal),
        // BUG FIX: `!=` was missing from this choice entirely, so
        // Token::NotEqual was never produced — `!=` lexed as Not, Assign.
        just("!=").to(Token::NotEqual),
        just('+').to(Token::Plus),
        just('-').to(Token::Minus),
        just('*').to(Token::Multiply),
        just('/').to(Token::Divide),
        just('!').to(Token::Not),
        just('<').to(Token::Less),
        just('>').to(Token::Greater),
        just('=').to(Token::Assign),
        just('.').to(Token::Dot),
        just(',').to(Token::Comma),
        just(':').to(Token::Colon),
        just(';').to(Token::SemiColon),
        just('(').to(Token::OpenParen),
        just(')').to(Token::CloseParen),
    ));
    // Keywords and identifiers share one identifier scanner.
    let keyword = text::ident().map(|s: String| match s.as_str() {
        "true" => Token::Boolean(true),
        "false" => Token::Boolean(false),
        "let" => Token::KwLet,
        "fun" => Token::KwFun,
        "do" => Token::KwDo,
        "end" => Token::KwEnd,
        "if" => Token::KwIf,
        "then" => Token::KwThen,
        "else" => Token::KwElse,
        _ => Token::Identifier(s),
    });
    // BUG FIX: `float` must be tried before `int`. `text::int` succeeds on
    // the integer prefix of a float, so with `int.or(float)` the input
    // `1.5` lexed as Int(1), Dot, Int(5) and Float was unreachable.
    let token = float
        .or(int)
        .or(string)
        .or(symbol)
        .or(keyword)
        .recover_with(skip_then_retry_until([]));
    // `--` line comment, terminated by a newline.
    let comment = just("--")
        .ignore_then(filter(|c| *c != '\n').repeated())
        .then_ignore(just('\n'));
    token
        .padded_by(comment.repeated())
        .map_with_span(|token, span| (token, span))
        .padded()
        .repeated()
}
/// Convenience wrapper around [`lexer`]: lexes `src` and returns the
/// recovered tokens (if any) together with all lexing errors.
///
/// Because `parse_recovery` keeps going after errors, both sides of the
/// tuple can be populated at once.
pub fn lex(src: String) -> (Option<Vec<(Token, std::ops::Range<usize>)>>, Vec<Simple<char>>) {
    // Return the result tuple directly — no need to destructure and
    // `return` it (clippy: needless_return).
    lexer().parse_recovery(src.as_str())
}

11
crates/main/Cargo.toml Normal file
View file

@ -0,0 +1,11 @@
[package]
name = "hycron"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = { version = "3.0.14", features = ["derive"] }
lexer = { path = "../lexer" }
parser = { path = "../parser" }

44
crates/main/src/main.rs Normal file
View file

@ -0,0 +1,44 @@
use std::fs;
use clap::Parser as ArgParser;
use lexer::lex;
use parser::parse;
pub mod args;
use args::{Args, Options};
pub mod util;
use crate::util::log;
/// CLI entry point: read the input file, lex it, parse it, and dump the AST.
fn main() {
    let args = Args::parse();
    match args.options {
        Options::Compile {
            input: file_name,
            ast: _print_ast, // accepted but not consulted — the AST is always printed below
        } => {
            // Get file contents.
            let src = fs::read_to_string(&file_name).expect("Failed to read file");
            // Lex the file.
            let (tokens, lex_error) = lex(src.clone());
            if lex_error.is_empty() {
                log(0, "Lexing successful.");
                // NOTE(review): `tokens.unwrap()` assumes parse_recovery always
                // yields Some when no errors were reported — TODO confirm.
                // `len` gives the parser an end-of-input span for diagnostics.
                let (ast, parse_error) = parse(tokens.unwrap(), src.chars().count());
                if parse_error.is_empty() {
                    println!("{:#?}", ast);
                    log(0, "Parsing successful.");
                } else {
                    println!("{:#?}", parse_error);
                    log(2, "Parsing failed.");
                }
            } else {
                println!("{:#?}", lex_error);
                log(2, "Lexing failed.");
            }
        }
    }
}

11
crates/parser/Cargo.toml Normal file
View file

@ -0,0 +1,11 @@
[package]
name = "parser"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lexer = { path = "../lexer" }
chumsky = "0.8.0"
ariadne = "0.1.5"

231
crates/parser/src/lib.rs Normal file
View file

@ -0,0 +1,231 @@
use chumsky::{prelude::*, Stream};
use lexer::Token;
/// A value paired with its byte range in the source text.
pub type Spanned<T> = (T, std::ops::Range<usize>);

/// AST node for the language. Every child node is wrapped in [`Spanned`].
#[derive(Clone, Debug)]
pub enum Expr {
    // Literals
    Int(i64), Float(f64), Boolean(bool),
    String(String), Identifier(String),
    // Operator applications; `op` holds the operator's display text (e.g. "+").
    Unary { op: String, rhs: Box<Spanned<Self>> },
    Binary { lhs: Box<Spanned<Self>>, op: String, rhs: Box<Spanned<Self>> },
    // Function call: callee expression plus the spanned argument list.
    Call { name: Box<Spanned<Self>>, args: Spanned<Vec<Spanned<Self>>> },
    // `let name: type_hint = value`
    Let {
        name: String,
        type_hint: String,
        value: Box<Spanned<Self>>,
    },
    // `fun name (arg: Type) ... : type_hint = body`
    Fun {
        name: String,
        type_hint: String,
        args: Spanned<Vec<(Spanned<String>, Spanned<String>)>>,
        body: Box<Spanned<Self>>
    },
    // `if cond then ... else ...` — note: not yet produced by `expr_parser`
    // below (no `if` branch exists there).
    If {
        cond: Box<Spanned<Self>>,
        then: Box<Spanned<Self>>,
        else_: Box<Spanned<Self>>
    },
    // `do expr; expr; ... end`
    Do {
        body: Vec<Spanned<Self>>
    },
}
/// Builds the expression parser over the lexer's token stream.
///
/// Binding tightness, tightest first: call > unary (+/-) > factor (*, /)
/// > term (+, -) > compare (==, !=, <, >). `let`, `fun` and `do` forms are
/// tried before plain expressions. A program is a `;`-terminated list of
/// expressions, parsed to end of input.
fn expr_parser() -> impl Parser<Token, Vec<Spanned<Expr>>, Error = Simple<Token>> + Clone {
    // Accept any identifier token, keeping the name and its span.
    let identifier = filter_map(|span, token| match token {
        Token::Identifier(s) => Ok((s, span)),
        _ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
    }).labelled("identifier");
    // Accept any literal token. Floats were lexed as strings and are
    // converted to f64 here (unwrap: the lexer only emits digit.digit forms).
    let literal = filter_map(|span, token| match token {
        Token::Int(i) => Ok((Expr::Int(i), span)),
        Token::Float(f) => Ok((Expr::Float(f.parse().unwrap()), span)),
        Token::Boolean(b) => Ok((Expr::Boolean(b), span)),
        Token::String(s) => Ok((Expr::String(s), span)),
        _ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
    }).labelled("literal");
    let expr = recursive(|expr| {
        // Comma-separated call arguments; trailing comma allowed.
        let args = expr.clone()
            .separated_by(just(Token::Comma))
            .allow_trailing();
        // Atoms: literals and identifiers. Parenthesized sub-expressions are
        // currently disabled (commented out below).
        let atom = literal
            .or(identifier.map(|(s, span)| (Expr::Identifier(s), span)))
            // .or(
            //     expr.clone()
            //         .delimited_by(just(Token::OpenParen), just(Token::CloseParen)))
            .labelled("atom");
        // Zero or more `(args)` suffixes fold left into nested Call nodes.
        let call = atom
            .then(
                args.clone()
                    .delimited_by(
                        just(Token::OpenParen),
                        just(Token::CloseParen),
                    )
                    .repeated()
            )
            .foldl(|name, args| {(
                Expr::Call {
                    name: Box::new(name.clone()),
                    // NOTE(review): the argument list reuses the callee's
                    // span rather than its own — confirm this is intended.
                    args: (args, name.1.clone()),
                },
                name.1,
            )});
        // Prefix unary operators. NOTE(review): these are `+`/`-`, not
        // `-`/`!` — there is no unary-not here even though Token::Not exists.
        let unary = choice((
            just(Token::Plus),
            just(Token::Minus)))
            .repeated()
            .then(call)
            .foldr(|op, rhs| {
                (
                    Expr::Unary {
                        op: op.to_string(), // operator rendered via Display
                        rhs: Box::new(rhs.clone()),
                    },
                    rhs.1,
                )
            });
        // Multiplicative level: left-associative `*` and `/`.
        let factor = unary.clone()
            .then(
                choice((
                    just(Token::Multiply),
                    just(Token::Divide)))
                .then(unary)
                .repeated())
            .foldl(|lhs, (op, rhs)| {
                (
                    Expr::Binary {
                        lhs: Box::new(lhs),
                        op: op.to_string(),
                        rhs: Box::new(rhs.clone()),
                    },
                    // NOTE(review): result span is only the rhs span, not
                    // lhs.start..rhs.end — confirm intended.
                    rhs.1,
                )
            });
        // Additive level: left-associative `+` and `-`.
        let term = factor.clone()
            .then(
                choice((
                    just(Token::Plus),
                    just(Token::Minus)))
                .then(factor)
                .repeated())
            .foldl(|lhs, (op, rhs)| {
                (
                    Expr::Binary {
                        lhs: Box::new(lhs),
                        op: op.to_string(),
                        rhs: Box::new(rhs.clone()),
                    },
                    rhs.1,
                )
            });
        // Comparison level: `<`, `>`, `==`, `!=` (also left-associative).
        let compare = term.clone()
            .then(
                choice((
                    just(Token::Less),
                    just(Token::Greater),
                    just(Token::Equal),
                    just(Token::NotEqual)))
                .then(term)
                .repeated())
            .foldl(|lhs, (op, rhs)| {
                (
                    Expr::Binary {
                        lhs: Box::new(lhs),
                        op: op.to_string(),
                        rhs: Box::new(rhs.clone()),
                    },
                    rhs.1,
                )
            });
        // `let name: Type = value` — span covers name through value.
        let let_ = just(Token::KwLet)
            .ignore_then(identifier)
            .then_ignore(just(Token::Colon))
            .then(identifier)
            .then_ignore(just(Token::Assign))
            .then(expr.clone())
            .map(|((name, type_hint), value)| {
                (
                    Expr::Let {
                        name: name.0.clone(),
                        type_hint: type_hint.0,
                        value: Box::new(value.clone()),
                    },
                    name.1.start..value.1.end,
                )
            });
        // `fun name (arg: Type) ... : RetType = body` — each argument is its
        // own parenthesized `(name: Type)` group, repeated.
        let fun = just(Token::KwFun)
            .ignore_then(identifier)
            .then(
                identifier
                    .then_ignore(just(Token::Colon))
                    .then(identifier)
                    .delimited_by(
                        just(Token::OpenParen),
                        just(Token::CloseParen),
                    )
                    .repeated()
            )
            .then_ignore(just(Token::Colon))
            .then(identifier)
            .then_ignore(just(Token::Assign))
            .then(expr.clone())
            .map(|(((name, args), type_hint), body)| {
                (
                    Expr::Fun {
                        name: name.0.clone(),
                        type_hint: type_hint.0,
                        // args reuse the function name's span
                        args: (args, name.1.clone()),
                        body: Box::new(body.clone()),
                    },
                    name.1.start..body.1.end,
                )
            });
        // `do expr; expr; ... end` — every inner expression requires a `;`.
        let do_block = just(Token::KwDo)
            .ignore_then(
                expr.clone()
                    .then_ignore(just(Token::SemiColon))
                    .repeated()
            )
            .then_ignore(just(Token::KwEnd))
            .map_with_span(|body, span| {
                (
                    Expr::Do {
                        body: body.clone(),
                    },
                    span,
                )
            });
        // Declaration forms are tried before plain expressions.
        let_
            .or(fun)
            .or(do_block)
            .or(compare)
    }).labelled("expression");
    // A program: `;`-terminated expressions, all the way to end of input.
    expr
        .then_ignore(just(Token::SemiColon))
        .repeated()
        .then_ignore(end())
}
/// Parses a lexed token stream into a list of spanned expressions.
///
/// `len` is the source length in characters; it supplies the end-of-input
/// span (`len..len + 1`) that chumsky's `Stream` needs for error reporting.
/// Because `parse_recovery` recovers from errors, both the AST and the
/// error list may be populated at once.
pub fn parse(tokens: Vec<(Token, std::ops::Range<usize>)>, len: usize) -> (Option<Vec<(Expr, std::ops::Range<usize>)>>, Vec<Simple<Token>>) {
    // Return the result tuple directly — no need to destructure and
    // `return` it (clippy: needless_return).
    expr_parser().parse_recovery(Stream::from_iter(
        len..len + 1,
        tokens.into_iter(),
    ))
}

View file

@ -1,12 +1,10 @@
let foo: string = 1; -- Variables
let foo: Int = 1;
let bar: String = "bar";
fun bar (baz: int) -> int = baz + 1; -- Functions
fun qux (quux: int) -> int = do fun add (lhs: Int) (rhs: Int): Int = lhs + rhs;
let corge: int = quux + quux; fun add_2 (lhs: Int) (rhs: Int): Int = do
bar(corge); let a: Int = lhs + rhs;
let b: Int = a + lhs;
end; end;
fun add (lhs: int, rhs: int) -> int = lhs + rhs;
print(add(34, 35));
print(qux(5));

View file

@ -1,91 +0,0 @@
use crate::middle::ir::{IR, Value};
pub fn gen(irs: Vec<IR>) -> String {
let mut output = String::new();
for ir in irs {
output.push_str(&gen_ir(&ir));
}
output
}
fn gen_ir(ir: &IR) -> String {
match ir {
IR::Define { name, type_hint: _, value } => { // type_hint is only used in type_checking i think
let value = gen_ir(value);
format!("const {} = {};", name, value)
},
IR::Fun { name, return_type_hint: _, args, body } => {
let args = args
.iter()
.map(|(name, _)| format!("{}", name))
.collect::<Vec<_>>()
.join(", ");
let body = match &**body {
IR::Value { value } => gen_value(value),
IR::Do { body } => {
let mut out = String::new();
for (i, node) in body.iter().enumerate() {
if i == body.len() - 1 {
out.push_str(format!("return {};", gen_ir(node)).as_str());
} else {
out.push_str(&gen_ir(node));
}
}
out
},
IR::Binary { op, left, right } => {
format!(
"return {} {} {};",
gen_ir(left),
op,
gen_ir(right)
)
},
_ => { println!("{:?}", body); todo!() }
};
format!(
"const {} = ({}) => {{ {} }};",
name,
args,
body
)
},
IR::Call { name, args } => {
match name.as_str() {
"print" => {
let args = gen_ir(&args[0]);
format!("console.log({});", args.trim_end_matches(";"))
},
_ => {
let args = args
.iter()
.map(|arg| gen_ir(arg))
.collect::<Vec<_>>()
.join(", ");
format!("{}({})", name, args)
},
}
},
IR::Value { value } => {
gen_value(value)
},
IR::Binary { op, left, right } => {
let left = gen_ir(left);
let right = gen_ir(right);
format!("({} {} {});", left, op, right)
},
_ => { println!("{:?}", ir); todo!() }
}
}
fn gen_value(value: &Value) -> String {
match value {
Value::Int(i) => format!("{}", i),
Value::Float(f) => format!("{}", f),
Value::Bool(b) => format!("{}", b),
Value::String(s) => format!("\"{}\"", s),
Value::Ident(s) => format!("{}", s),
}
}

View file

@ -1,2 +0,0 @@
/// Javascript compiler backend
pub mod js;

View file

@ -1 +0,0 @@
pub mod parse;

View file

@ -1,319 +0,0 @@
use chumsky::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Token {
// Types
Int(i64), Float(String),
Bool(bool), String(String),
Ident(String),
// Symbols
Operator(String),
Delimiter(char),
Semicolon,
Assign, Colon,
Comma, Dot,
ReturnHint,
// Keywords
Import,
Let, Fun,
If, Then, Else, End,
Do,
}
pub type Span = std::ops::Range<usize>;
pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = Simple<char>> {
let int = text::int(10)
.map(|s: String| Token::Int(s.parse().unwrap()));
// TODO: this is not working somehow
let float = text::int(10)
.then_ignore(just('.'))
.chain::<char, _, _>(text::digits(10))
.collect::<String>()
.map(|s: String| Token::Float(s));
let string = just('"')
.ignore_then(filter(|c| *c != '"').repeated())
.then_ignore(just('"'))
.collect::<String>()
.map(|s: String| Token::String(s));
let symbol = choice((
just(';').to(Token::Semicolon),
just('=').to(Token::Assign),
just(':').to(Token::Colon),
just(',').to(Token::Comma),
just("->").to(Token::ReturnHint),
just(".").to(Token::Dot),
));
let operator = choice((
just("+"),
just("-"),
just("*"),
just("/"),
just("%"),
just("!"),
just("=="),
just("!="),
just("<"),
just(">"),
just("<="),
just(">="),
)).map(|c| Token::Operator(c.to_string()));
let delimiter = choice((
just('('),
just(')'),
just('{'),
just('}'),
)).map(|c| Token::Delimiter(c));
let keyword = text::ident().map(|s: String| match s.as_str() {
"true" => Token::Bool(true),
"false" => Token::Bool(false),
"import" => Token::Import,
"let" => Token::Let,
"fun" => Token::Fun,
"if" => Token::If,
"then" => Token::Then,
"else" => Token::Else,
"end" => Token::End,
"do" => Token::Do,
_ => Token::Ident(s),
});
let token = int
.or(float)
.or(string)
.or(symbol)
.or(operator)
.or(delimiter)
.or(keyword)
.recover_with(skip_then_retry_until([]));
let comment = just("/*").then(take_until(just("*/")))
.padded()
.ignored();
token
.padded_by(comment.repeated())
.map_with_span(|token, span| (token, span))
.padded()
.repeated()
}
#[derive(Clone, Debug)]
pub enum Expr {
Int(i64), Float(f64),
Bool(bool), String(String),
Ident(String),
Unary { op: String, expr: Box<Self> },
Binary { op: String, left: Box<Self>, right: Box<Self> },
Call { name: Box<Self>, args: Vec<Self> },
Let {
name: String,
type_hint: String,
value: Box<Self>,
},
Fun {
name: String,
type_hint: String,
args: Vec<(String, String)>,
body: Box<Self>,
},
If {
cond: Box<Self>,
then: Box<Self>,
else_: Box<Self>,
},
Do { body: Vec<Self> },
}
fn expr_parser() -> impl Parser<Token, Expr, Error = Simple<Token>> + Clone {
let ident = filter_map(|span, token| match token {
Token::Ident(s) => Ok(s.clone()),
_ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
}).labelled("identifier");
let literal = filter_map(|span, token| match token {
Token::Int(i) => Ok(Expr::Int(i)),
Token::Float(f) => Ok(Expr::Float(f.parse().unwrap())),
Token::Bool(b) => Ok(Expr::Bool(b)),
Token::String(s) => Ok(Expr::String(s)),
_ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
}).labelled("literal");
let expr = recursive(|expr| {
let args = expr.clone()
.separated_by(just(Token::Comma))
.allow_trailing();
let atom = literal
.or(ident.map(Expr::Ident))
.or(
expr.clone()
.delimited_by(just(Token::Delimiter('(')), just(Token::Delimiter(')'))))
.labelled("atom");
let call = atom
.then(
args
.delimited_by(
just(Token::Delimiter('(')),
just(Token::Delimiter(')')))
.repeated()
)
.foldl(|f, args| {
Expr::Call {
name: Box::new(f),
args,
}
});
let unary = choice((
just(Token::Operator("-".to_string())).to("-"),
just(Token::Operator("!".to_string())).to("!")))
.repeated()
.then(call)
.foldr(|op, rhs| Expr::Unary { op: op.to_string(), expr: Box::new(rhs) }).labelled("unary");
let factor = unary.clone()
.then(
choice((
just(Token::Operator("*".to_string())).to("*"),
just(Token::Operator("/".to_string())).to("/")))
.then(unary)
.repeated())
.foldl(|lhs, (op, rhs)| Expr::Binary {
op: op.to_string(),
left: Box::new(lhs),
right: Box::new(rhs)
}).labelled("factor");
let term = factor.clone()
.then(
choice((
just(Token::Operator("+".to_string())).to("+"),
just(Token::Operator("-".to_string())).to("-")))
.then(factor)
.repeated())
.foldl(|lhs, (op, rhs)| Expr::Binary {
op: op.to_string(),
left: Box::new(lhs),
right: Box::new(rhs)
}).labelled("term");
let compare = term.clone()
.then(
choice((
just(Token::Operator("==".to_string())).to("=="),
just(Token::Operator("!=".to_string())).to("!="),
just(Token::Operator("<".to_string())).to("<"),
just(Token::Operator(">".to_string())).to(">"),
just(Token::Operator("<=".to_string())).to("<="),
just(Token::Operator(">=".to_string())).to(">=")))
.then(term)
.repeated())
.foldl(|lhs, (op, rhs)| Expr::Binary {
op: op.to_string(),
left: Box::new(lhs),
right: Box::new(rhs)
}).labelled("compare");
compare
}).labelled("expression");
let declare = recursive(|decl| {
let do_block = just(Token::Do)
.ignore_then(
expr.clone()
.or(decl.clone())
.then_ignore(just(Token::Semicolon))
.repeated())
.then_ignore(just(Token::End))
.map(|body| Expr::Do { body });
let declare_var = just(Token::Let)
.ignore_then(ident)
.then_ignore(just(Token::Colon))
.then(ident)
.then_ignore(just(Token::Assign))
.then(
do_block.clone()
.or(decl.clone())
)
.map(|((name, type_hint), value)| Expr::Let {
name,
type_hint,
value: Box::new(value),
}).labelled("variable");
let declare_fun = just(Token::Fun)
.ignore_then(ident)
.then_ignore(just(Token::Delimiter('(')))
.then(
(ident
.then_ignore(just(Token::Colon))
.then(ident))
.separated_by(just(Token::Comma))
.allow_trailing()
)
.then_ignore(just(Token::Delimiter(')')))
.then_ignore(just(Token::ReturnHint))
.then(ident)
.then_ignore(just(Token::Assign))
.then(
do_block.clone()
.or(decl.clone())
)
.map(|(((name, args), type_hint), body)| Expr::Fun {
name,
type_hint,
args,
body: Box::new(body),
}).labelled("function");
let if_cond = just(Token::If)
.ignore_then(expr.clone())
.then_ignore(just(Token::Then))
.then(
do_block.clone()
.or(decl.clone().then_ignore(just(Token::Semicolon).or_not()))
)
.then_ignore(just(Token::Else))
.then(
do_block.clone()
.or(decl.clone().then_ignore(just(Token::Semicolon).or_not()))
)
.then_ignore(just(Token::End))
.map(|((cond, then), else_)| Expr::If {
cond: Box::new(cond),
then: Box::new(then),
else_: Box::new(else_),
}).labelled("if");
declare_var
.or(declare_fun)
.or(if_cond)
.or(do_block)
.or(expr)
}).labelled("declare");
declare
}
pub fn parser() -> impl Parser<Token, Vec<Expr>, Error = Simple<Token>> + Clone {
expr_parser()
.then_ignore(just(Token::Semicolon))
.repeated()
.then_ignore(end())
}

View file

@ -1,82 +0,0 @@
use std::{
fs,
io::{self, Write},
time,
};
use chumsky::{Parser, Stream};
use clap::Parser as ArgParser;
/// Arguments handler.
pub mod args;
use args::{Args, Options};
/// Front-end of the language.
/// Contains lexer, parser and token types.
pub mod front;
use front::parse::{lexer, parser};
/// Middle-end of the language.
/// Contains the intermediate representation.
pub mod middle;
use middle::ir;
/// Back-end of the language.
/// Contains code generator.
pub mod back;
/// Utility functions.
pub mod util;
use crate::util::log;
fn main() {
let args = Args::parse();
match args.options {
Options::Compile {
input: file_name,
ast: _print_ast,
} => {
// Get file contents.
let src = fs::read_to_string(&file_name).expect("Failed to read file");
// Lex the file.
let (tokens, lex_error) = lexer().parse_recovery(src.as_str());
let len = src.chars().count();
// Parse the file.
let (ast, parse_error) = parser().parse_recovery(Stream::from_iter(
len..len + 1,
tokens.clone().unwrap().into_iter(),
));
if lex_error.is_empty() {
if parse_error.is_empty() {
match ast {
// If there is some AST then generate code.
Some(ast) => {
let start = time::Instant::now();
let ir = ir::ast_to_ir(ast);
let out = back::js::gen(ir);
let file = fs::File::create("out.js").expect("Failed to create file");
let mut file = io::BufWriter::new(file);
file.write_all(out.as_bytes())
.expect("Failed to write file");
let all_elapsed = start.elapsed();
log(0, format!("Done in {}s", all_elapsed.as_secs_f64()));
}
// If there is no AST, then notify the user.
None => println!("no ast :("),
};
} else {
eprintln!("{:#?}\n(Parser error)", parse_error);
}
} else {
eprintln!("{:#?}\n(Lexer error)", lex_error);
}
}
}
}

View file

@ -1,90 +0,0 @@
use crate::front::parse::Expr;
#[derive(Debug, Clone)]
pub enum TypeHint {
Int,
Float,
Bool,
String,
}
#[derive(Debug, Clone)]
pub enum Value {
Int(i64),
Float(f64),
Bool(bool),
String(String),
Ident(String),
}
#[derive(Debug, Clone)]
pub enum IR {
Define { name: String, type_hint: TypeHint, value: Box<Self> },
Fun { name: String, return_type_hint: TypeHint, args: Vec<(String, TypeHint)>, body: Box<Self> },
Call { name: String, args: Vec<Self> },
Do { body: Vec<Self> },
If { cond: Box<Self>, body: Box<Self>, else_body: Box<Self> },
Value { value: Value },
Binary { op: String, left: Box<Self>, right: Box<Self> },
}
pub fn ast_to_ir(ast: Vec<Expr>) -> Vec<IR> {
let mut ir = Vec::new();
for expr in ast {
ir.push(expr_to_ir(&expr));
}
ir
}
pub fn expr_to_ir(expr: &Expr) -> IR {
match expr {
Expr::Let { name, type_hint, value } => IR::Define {
name: name.clone(),
type_hint: get_typehint(type_hint),
value: Box::new(expr_to_ir(value)),
},
Expr::Fun { name, type_hint, args, body } => IR::Fun {
name: name.clone(),
return_type_hint: get_typehint(type_hint),
args: args
.iter()
.map(|(name, type_hint)| (name.to_string(), get_typehint(type_hint)))
.collect::<Vec<_>>(),
body: Box::new(expr_to_ir(body)),
},
Expr::Call { name, args } => IR::Call {
name: match &**name {
Expr::Ident(s) => s.clone(),
_ => panic!("Expected ident in call"),
},
args: args.iter().map(|arg| expr_to_ir(arg)).collect(),
},
Expr::Do { body } => IR::Do {
body: body
.iter()
.map(|expr| expr_to_ir(expr))
.collect::<Vec<_>>(),
},
Expr::Binary { op, left, right } => IR::Binary {
op: op.to_string(),
left: Box::new(expr_to_ir(left)),
right: Box::new(expr_to_ir(right)),
},
Expr::Int(value) => IR::Value { value: Value::Int(*value) },
Expr::Float(value) => IR::Value { value: Value::Float(*value) },
Expr::Bool(value) => IR::Value { value: Value::Bool(*value) },
Expr::String(value) => IR::Value { value: Value::String(value.clone()) },
Expr::Ident(name) => IR::Value { value: Value::Ident(name.clone()) },
_ => { println!("{:?}", expr); todo!() }
}
}
fn get_typehint(from: &String) -> TypeHint {
match from.as_str() {
"int" => TypeHint::Int,
"float" => TypeHint::Float,
"bool" => TypeHint::Bool,
"string" => TypeHint::String,
_ => panic!("Unsupported type hint: {}", from)
}
}

View file

@ -1,2 +0,0 @@
// The intermediate representation of the AST
pub mod ir;