mirror of https://github.com/azur1s/bobbylisp.git synced 2024-10-16 02:37:40 -05:00

lexer + call

Natapat Samutpong 2022-02-17 12:04:52 +07:00
parent 91f89d7ef6
commit fce1760198
3 changed files with 163 additions and 42 deletions


@@ -1,2 +1,2 @@
-fun add a b = a + b;
-let foo = add (1, 2);
+fun foo a b = a + b;
+let res = foo(34, 35);
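Under the Token enum and lexer() added in the next file, the updated example should tokenize roughly as sketched here. This is an illustration, not output from the compiler; it assumes the snippet lives in the same crate, so that the front::parse path matches the `pub mod front` import used in main.rs below.

use chumsky::Parser;
use front::parse::lexer; // path assumed from `pub mod front` / `use front::parse::...` in main.rs

fn main() {
    let src = "fun foo a b = a + b; let res = foo(34, 35);";
    // Expected tokens (roughly): Fun, Ident("foo"), Ident("a"), Ident("b"), Assign,
    // Ident("a"), Operator("+"), Ident("b"), Semicolon, Let, Ident("res"), Assign,
    // Ident("foo"), Delimiter('('), Int(34), Comma, Int(35), Delimiter(')'), Semicolon,
    // each paired with its byte-range Span.
    let (tokens, errors) = lexer().parse_recovery(src);
    println!("{:?}", tokens);
    println!("{:?}", errors);
}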


@@ -1,10 +1,101 @@
 use chumsky::prelude::*;
 
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum Token {
+    // Types
+    Int(i64), Float(String),
+    Boolean(bool), String(String),
+    Ident(String),
+
+    // Symbols
+    Operator(String),
+    Delimiter(char),
+
+    Semicolon,
+    Assign, Colon,
+    Comma,
+
+    // Keywords
+    Let, Fun,
+}
+
+pub type Span = std::ops::Range<usize>;
+
+pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = Simple<char>> {
+    let int = text::int(10)
+        .map(|s: String| Token::Int(s.parse().unwrap()));
+
+    let float = text::int(10)
+        .then_ignore(just('.'))
+        .chain::<char, _, _>(text::digits(10))
+        .collect::<String>()
+        .map(|s: String| Token::Float(s));
+
+    let string = just('"')
+        .ignore_then(filter(|c| *c != '\\' && *c != '"').repeated())
+        .then_ignore(just('"'))
+        .collect::<String>()
+        .map(|s: String| Token::String(s));
+
+    let operator = choice((
+        just("+"),
+        just("-"),
+        just("*"),
+        just("/"),
+        just("%"),
+        just("!"),
+        just("=="),
+        just("!="),
+        just("<"),
+        just(">"),
+        just("<="),
+        just(">="),
+    )).map(|c| Token::Operator(c.to_string()));
+
+    let delimiter = choice((
+        just('('),
+        just(')'),
+    )).map(|c| Token::Delimiter(c));
+
+    let symbol = choice((
+        just(';').to(Token::Semicolon),
+        just('=').to(Token::Assign),
+        just(':').to(Token::Colon),
+        just(',').to(Token::Comma),
+    ));
+
+    let keyword = text::ident().map(|s: String| match s.as_str() {
+        "true" => Token::Boolean(true),
+        "false" => Token::Boolean(false),
+        "let" => Token::Let,
+        "fun" => Token::Fun,
+        _ => Token::Ident(s),
+    });
+
+    let token = int
+        .or(float)
+        .or(string)
+        .or(operator)
+        .or(delimiter)
+        .or(symbol)
+        .or(keyword)
+        .recover_with(skip_then_retry_until([]));
+
+    let comment = just("//").then(take_until(just('\n'))).padded();
+
+    token
+        .padded_by(comment.repeated())
+        .map_with_span(|token, span| (token, span))
+        .padded()
+        .repeated()
+}
+
 #[derive(Clone, Debug)]
 pub enum Expr {
-    Int(i64),
-    Float(f64),
+    Int(i64), Float(f64),
+    Boolean(bool), String(String),
     Ident(String),
 
     Unary { op: String, expr: Box<Self> },
     Binary { op: String, left: Box<Self>, right: Box<Self> },
@@ -18,45 +109,67 @@ pub enum Expr {
         body: Box<Self>,
     },
     Call {
-        name: String,
+        name: Box<Self>,
         args: Vec<Self>,
     },
 }
 
-fn expr_parser() -> impl Parser<char, Expr, Error = Simple<char>> {
-    let ident = text::ident().padded();
+fn expr_parser() -> impl Parser<Token, Expr, Error = Simple<Token>> + Clone {
+    let ident = filter_map(|span, token| match token {
+        Token::Ident(s) => Ok(s.clone()),
+        _ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
+    }).labelled("identifier");
 
     let expr = recursive(|expr| {
-        let int = text::int(10)
-            .map(|s: String| Expr::Int(s.parse().unwrap()));
+        let literal = filter_map(|span, token| match token {
+            Token::Int(i) => Ok(Expr::Int(i)),
+            Token::Float(f) => Ok(Expr::Float(f.parse().unwrap())),
+            Token::Boolean(b) => Ok(Expr::Boolean(b)),
+            Token::String(s) => Ok(Expr::String(s)),
+            _ => Err(Simple::expected_input_found(span, Vec::new(), Some(token))),
+        }).labelled("literal");
 
-        let float = text::int(10)
-            .then_ignore(just('.'))
-            .chain::<char, _, _>(text::digits(10))
-            .collect::<String>()
-            .map(|s: String| Expr::Float(s.parse().unwrap()));
-
-        let call = ident
-            .then(expr.clone()
-                .separated_by(just(','))
-                .allow_trailing()
-                .delimited_by(just('('), just(')')))
-            .map(|(name, args)| Expr::Call { name, args });
+        let items = expr.clone()
+            .chain(just(Token::Comma)
+                .ignore_then(expr.clone()).repeated())
+            .then_ignore(just(Token::Comma).or_not())
+            .or_not()
+            .map(|item| item.unwrap_or_else(Vec::new));
 
-        let atom = int
-            .or(float)
-            .or(call)
+        let atom = literal
             .or(ident.map(Expr::Ident))
-            .or(expr.delimited_by(just('('), just(')')))
+            .or(
+                expr.clone()
+                .delimited_by(just(Token::Delimiter('(')), just(Token::Delimiter(')'))))
             .labelled("atom");
 
-        let unary = choice((just('-'), just('!')))
+        let call = atom
+            .then(
+                items
+                    .delimited_by(
+                        just(Token::Delimiter('(')),
+                        just(Token::Delimiter(')')))
+                    .repeated()
+            )
+            .foldl(|f, args| {
+                Expr::Call {
+                    name: Box::new(f),
+                    args,
+                }
+            });
+
+        let unary = choice((
+            just(Token::Operator("-".to_string())).to("-"),
+            just(Token::Operator("!".to_string())).to("!")))
             .repeated()
-            .then(atom)
+            .then(call)
             .foldr(|op, rhs| Expr::Unary { op: op.to_string(), expr: Box::new(rhs) }).labelled("unary");
 
         let factor = unary.clone()
-            .then(choice((just('*'), just('/')))
+            .then(
+                choice((
+                    just(Token::Operator("*".to_string())).to("*"),
+                    just(Token::Operator("/".to_string())).to("/")))
                 .then(unary)
                 .repeated())
             .foldl(|lhs, (op, rhs)| Expr::Binary {
@@ -66,7 +179,10 @@ fn expr_parser() -> impl Parser<char, Expr, Error = Simple<char>> {
         }).labelled("factor");
 
         let term = factor.clone()
-            .then(choice((just('+'), just('-')))
+            .then(
+                choice((
+                    just(Token::Operator("+".to_string())).to("+"),
+                    just(Token::Operator("-".to_string())).to("-")))
                 .then(factor)
                 .repeated())
             .foldl(|lhs, (op, rhs)| Expr::Binary {
@ -75,26 +191,26 @@ fn expr_parser() -> impl Parser<char, Expr, Error = Simple<char>> {
right: Box::new(rhs) right: Box::new(rhs)
}).labelled("term"); }).labelled("term");
term.padded() term
}).labelled("expression"); }).labelled("expression");
let declare = recursive(|decl| { let declare = recursive(|decl| {
let declare_var = text::keyword("let") let declare_var = just(Token::Let)
.ignore_then(ident) .ignore_then(ident)
.then_ignore(just('=')) .then_ignore(just(Token::Assign))
.then(expr.clone()) .then(expr.clone())
.then_ignore(just(';')) .then_ignore(just(Token::Semicolon))
.map(|(name, rhs)| Expr::Let { .map(|(name, rhs)| Expr::Let {
name, name,
value: Box::new(rhs), value: Box::new(rhs),
}); });
let declare_fun = text::keyword("fun") let declare_fun = just(Token::Fun)
.ignore_then(ident) .ignore_then(ident)
.then(ident.repeated()) .then(ident.repeated())
.then_ignore(just('=')) .then_ignore(just(Token::Assign))
.then(expr.clone()) .then(expr.clone())
.then_ignore(just(';')) .then_ignore(just(Token::Semicolon))
.map(|((name, args), body)| Expr::Fun { .map(|((name, args), body)| Expr::Fun {
name, name,
args, args,
@@ -104,13 +220,12 @@ fn expr_parser() -> impl Parser<char, Expr, Error = Simple<char>> {
         declare_var
             .or(declare_fun)
             .or(expr)
-            .padded()
     });
 
     declare
 }
 
-pub fn parser() -> impl Parser<char, Vec<Expr>, Error = Simple<char>> {
+pub fn parser() -> impl Parser<Token, Vec<Expr>, Error = Simple<Token>> + Clone {
    expr_parser()
        .repeated()
        .then_ignore(end())
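The call rule above no longer requires a bare identifier before the parentheses: any atom may be followed by zero or more parenthesised argument lists, and foldl nests them left-to-right into Call nodes whose callee is itself an expression. A rough sketch of the shapes this should produce, assuming the Expr enum from this file is in scope (illustrative only, not compiler output):

fn _call_shapes() {
    // `foo(34, 35)` -> a single Call node.
    let _single = Expr::Call {
        name: Box::new(Expr::Ident("foo".to_string())),
        args: vec![Expr::Int(34), Expr::Int(35)],
    };
    // `foo(1)(2)` -> the argument lists are `.repeated()`, so foldl nests the calls.
    let _chained = Expr::Call {
        name: Box::new(Expr::Call {
            name: Box::new(Expr::Ident("foo".to_string())),
            args: vec![Expr::Int(1)],
        }),
        args: vec![Expr::Int(2)],
    };
}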


@@ -1,6 +1,6 @@
 use std::fs;
 
-use chumsky::Parser;
+use chumsky::{Parser, Stream};
 use clap::Parser as ArgParser;
 
 /// Arguments handler.
@@ -10,15 +10,21 @@ use args::{Args, Options};
 /// Front-end of the language.
 /// Contains lexer, parser and token types.
 pub mod front;
-use front::parse::parser;
+use front::parse::{lexer, parser};
 
 fn main() {
     let args = Args::parse();
     match args.options {
         Options::Compile { input: src, ast: _print_ast } => {
             let src = fs::read_to_string(src).expect("Failed to read file");
-            let tokens = parser().parse_recovery(src.as_str());
-            println!("{:?}", tokens);
+            let (tokens, lex_error) = lexer().parse_recovery(src.as_str());
+            let len = src.chars().count();
+            let (ast, parse_error) = parser().parse_recovery(Stream::from_iter(len..len + 1, tokens.clone().unwrap().into_iter()));
+            if parse_error.is_empty() {
+                println!("{:#?}", ast);
+            } else {
+                println!("{:?}", parse_error);
+            }
         },
     }
 }
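One thing to note about the driver: tokens.clone().unwrap() panics if the lexer recovers nothing at all, and the len..len + 1 range supplies chumsky with an end-of-input span so errors at the end of the file still have somewhere to point. A slightly more defensive sketch of the same two-stage pipeline, under the assumption that it lives next to main (the function name and error handling here are illustrative):

use chumsky::{Parser, Stream};
use front::parse::{lexer, parser};

fn compile_source(src: &str) {
    // Stage 1: characters -> (Token, Span) pairs.
    let (tokens, lex_errors) = lexer().parse_recovery(src);
    if !lex_errors.is_empty() {
        eprintln!("lex errors: {:?}", lex_errors);
    }
    // Stage 2: tokens -> AST, fed through a Stream with an end-of-input span.
    if let Some(tokens) = tokens {
        let len = src.chars().count();
        let (ast, parse_errors) =
            parser().parse_recovery(Stream::from_iter(len..len + 1, tokens.into_iter()));
        if parse_errors.is_empty() {
            println!("{:#?}", ast);
        } else {
            eprintln!("parse errors: {:?}", parse_errors);
        }
    }
}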