1
1
Fork 0
mirror of https://github.com/azur1s/bobbylisp.git synced 2024-09-28 05:17:37 +00:00

feat: funcs ast

This commit is contained in:
Natapat Samutpong 2022-02-12 15:04:44 +07:00
parent 028c58980c
commit b455c661a0
5 changed files with 62 additions and 9 deletions

View file

@@ -1,2 +1,5 @@
-let foo :: String = "Hello, ";
-let bar :: String = "World!";
+let foo :: String = "foo";
+let bar :: String = "bar";
+func join :: (a , b) = {
+    let baz :: String = "yay";
+};

View file

@@ -34,8 +34,11 @@ syntax! { not_operator , "!", Token::Not }
 syntax! { typehint_punctuation , "::", Token::Typehint }
 syntax! { lparen_punctuation , "(", Token::LParen }
 syntax! { rparen_punctuation , ")", Token::RParen }
+syntax! { lbrace_punctuation , "{", Token::LBrace }
+syntax! { rbrace_punctuation , "}", Token::RBrace }
 syntax! { semicolon_punctuation , ";", Token::Semicolon }
 syntax! { colon_punctuation , ":", Token::Colon }
+syntax! { comma_punctuation , ",", Token::Comma }
 // Operator & Punctuation
 fn lex_operator_punctuation(input: &Bytes) -> IResult<&Bytes, Token> {
@@ -50,7 +53,8 @@ fn lex_operator_punctuation(input: &Bytes) -> IResult<&Bytes, Token> {
         typehint_punctuation,
         lparen_punctuation, rparen_punctuation,
-        semicolon_punctuation, colon_punctuation,
+        lbrace_punctuation, rbrace_punctuation,
+        semicolon_punctuation, colon_punctuation, comma_punctuation,
     ))(input)
 }

View file

@@ -14,7 +14,9 @@ pub enum Token {
     Plus, Minus, Mul, Div, Not,
     Eq, NEq, Lt, Gt, Lte, Gte,
-    LParen, RParen, Semicolon, Colon,
+    LParen, RParen,
+    LBrace, RBrace,
+    Semicolon, Colon, Comma,
     If, Else, Let, Func,
 }

View file

@@ -2,7 +2,7 @@ use nom::{
     bytes::complete::take,
     combinator::{verify, map},
     Err,
-    IResult, sequence::{terminated, tuple}, multi::many0, branch::alt, error::{Error, ErrorKind},
+    IResult, sequence::{terminated, tuple, pair, preceded, delimited}, multi::many0, branch::alt, error::{Error, ErrorKind},
 };
 use super::model::{Token, Tokens, Precedence, Infix, Program, Stmt, Expr, Ident, Literal};
@@ -16,9 +16,15 @@ macro_rules! tag_token (
 );
 tag_token!(tag_let, Token::Let);
+tag_token!(tag_func, Token::Func);
 tag_token!(tag_assign, Token::Assign);
 tag_token!(tag_typehint, Token::Typehint);
 tag_token!(tag_semicolon, Token::Semicolon);
+tag_token!(tag_lparen, Token::LParen);
+tag_token!(tag_rparen, Token::RParen);
+tag_token!(tag_lbrace, Token::LBrace);
+tag_token!(tag_rbrace, Token::RBrace);
+tag_token!(tag_comma, Token::Comma);
 tag_token!(tag_end_of_file, Token::EndOfFile);
 fn infix_operator(token: &Token) -> (Precedence, Option<Infix>) {
@@ -77,7 +83,7 @@ fn parse_ident_expr(input: Tokens) -> IResult<Tokens, Expr> {
     map(parse_ident, Expr::Ident)(input)
 }
-fn parse_let(input: Tokens) -> IResult<Tokens, Stmt> {
+fn parse_let_stmt(input: Tokens) -> IResult<Tokens, Stmt> {
     map(
         tuple((
             tag_let,
@@ -92,6 +98,32 @@ fn parse_let(input: Tokens) -> IResult<Tokens, Stmt> {
     )(input)
 }
/// Parses a non-empty, comma-separated parameter list: `a , b , c`.
///
/// Grammar: ident (`,` ident)* — requires at least one identifier; the
/// empty-list case is handled by `empty_params` at the call site.
fn parse_params(input: Tokens) -> IResult<Tokens, Vec<Ident>> {
    map(
        pair(parse_ident, many0(preceded(tag_comma, parse_ident))),
        // Prepend the first param to the rest directly instead of the
        // original `[&vec![p][..], &ps[..]].concat()`, which allocated a
        // temporary one-element Vec just to concatenate slices.
        |(head, rest)| std::iter::once(head).chain(rest).collect(),
    )(input)
}
fn empty_params(input: Tokens) -> IResult<Tokens, Vec<Ident>> { Ok((input, vec![])) }
fn parse_func_stmt(input: Tokens) -> IResult<Tokens, Stmt> {
map(
tuple((
tag_func,
parse_ident,
tag_typehint,
tag_lparen,
alt((parse_params, empty_params)),
tag_rparen,
tag_assign,
parse_block_stmt,
tag_semicolon,
)),
|(_, ident, _, _, params, _, _, block, _)| Stmt::Func(ident, params, block),
)(input)
}
 fn parse_expr(input: Tokens, precedence: Precedence, left: Expr) -> IResult<Tokens, Expr> {
     let (i1, t1) = take(1usize)(input)?;
@@ -123,9 +155,14 @@ fn parse_expr_lowest(input: Tokens) -> IResult<Tokens, Expr> {
     parse_expr_with(input, Precedence::Lowest)
 }
fn parse_block_stmt(input: Tokens) -> IResult<Tokens, Program> {
delimited(tag_lbrace, many0(parse_stmt), tag_rbrace)(input)
}
 fn parse_stmt(input: Tokens) -> IResult<Tokens, Stmt> {
     alt((
-        parse_let,
+        parse_let_stmt,
+        parse_func_stmt,
     ))(input)
 }

View file

@@ -16,8 +16,15 @@ fn main() {
             let bytes: Vec<u8> = fs::read(src).unwrap();
             let (_errs_, tokens) = Lexer::lex_tokens(&bytes).unwrap();
             let tokens = Tokens::new(&tokens);
-            let (_errs_, ast) = Parser::parse(tokens).unwrap();
-            println!("{:#?}", ast);
+            let ast = Parser::parse(tokens);
+            match ast {
+                Ok(ast) => {
+                    println!("{:#?}", ast);
+                }
+                Err(err) => {
+                    println!("{:#?}", err);
+                }
+            }
         },
     }
 }