diff --git a/able-script-test/parse_test.able b/able-script-test/parse_test.able
index 6d7ceca3..f3c8d371 100644
--- a/able-script-test/parse_test.able
+++ b/able-script-test/parse_test.able
@@ -1,3 +1 @@
-if (true) {
-    var a = 3;
-}
\ No newline at end of file
+a()
\ No newline at end of file
diff --git a/src/tokens.rs b/src/lexer.rs
similarity index 68%
rename from src/tokens.rs
rename to src/lexer.rs
index b21c4d51..dbe33a8a 100644
--- a/src/tokens.rs
+++ b/src/lexer.rs
@@ -1,7 +1,62 @@
-use logos::{Lexer, Logos};
+use logos::{Lexer, Logos, Span};
 
 use crate::variables::Abool;
 
+pub struct PeekableLexer<'source> {
+    lexer: Lexer<'source, Token>,
+    peeked: Option<Option<Token>>,
+}
+
+impl<'source> PeekableLexer<'source> {
+    pub fn lexer(source: &'source str) -> Self {
+        Self {
+            lexer: Token::lexer(source),
+            peeked: None,
+        }
+    }
+
+    /// Returns a reference to the next() value without advancing the iterator.
+    #[inline]
+    pub fn peek(&mut self) -> &Option<Token> {
+        if self.peeked.is_none() {
+            self.peeked = Some(self.lexer.next());
+        }
+        self.peeked.as_ref().unwrap()
+    }
+
+    /// Get the range for the current token in `Source`.
+    #[inline]
+    pub fn span(&self) -> Span {
+        self.lexer.span()
+    }
+
+    /// Get a string slice of the current token.
+    #[inline]
+    pub fn slice(&self) -> &'source str {
+        self.lexer.slice()
+    }
+
+    /// Get a slice of remaining source, starting at the end of current token.
+    #[inline]
+    pub fn remainder(&self) -> &'source str {
+        self.lexer.remainder()
+    }
+}
+
+impl<'source> Iterator for PeekableLexer<'source> {
+    type Item = Token;
+
+    /// Advances the iterator and returns the next value.
+    ///
+    /// Returns [`None`] when iteration is finished.
+    /// Individual iterator implementations may choose to resume iteration, and so calling `next()`
+    /// again may or may not eventually start returning [`Some(Item)`] again at some point.
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.lexer.next()
+    }
+}
+
 #[derive(Logos, Debug, PartialEq, Clone)]
 pub enum Token {
     #[token("functio")]
diff --git a/src/main.rs b/src/main.rs
index 32d3d3c2..bd6b72ae 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,8 +2,8 @@
 mod base_55;
 mod error;
+mod lexer;
 mod parser;
-mod tokens;
 mod variables;
 
 use clap::{App, Arg};
 
diff --git a/src/parser/item.rs b/src/parser/item.rs
index 46616dce..63dafa03 100644
--- a/src/parser/item.rs
+++ b/src/parser/item.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use crate::variables::Value;
 
 #[derive(Debug, Clone)]
@@ -11,6 +13,7 @@ pub enum Expr {
     },
     FunctionDeclaration {
         iden: String,
+        args: Vec<Iden>,
         body: Vec<Expr>,
     },
     BfFDeclaration {
@@ -22,6 +25,10 @@ pub enum Expr {
         body: Vec<Expr>,
     },
 
+    FunctionCall {
+        iden: Iden,
+        args: HashMap<Iden, Value>,
+    },
     Literal(Value),
     Melo(Iden),
 }
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 36a44c0d..24612522 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -1,19 +1,19 @@
 mod item;
+mod ops;
 mod utils;
 
 use item::Expr;
 
 use crate::{
     error::{Error, ErrorKind},
+    lexer::PeekableLexer,
     variables::Value,
 };
-use crate::{parser::item::Iden, tokens::Token};
-
-use logos::Logos;
+use crate::{lexer::Token, parser::item::Iden};
 
 /// Parser structure / state machine
 pub struct Parser<'a> {
-    lexer: logos::Lexer<'a, Token>,
+    lexer: PeekableLexer<'a>,
     ast: Vec<Expr>,
 }
 
@@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
     /// Create a new parser object
     pub fn new(source: &'a str) -> Self {
         Self {
-            lexer: Token::lexer(source),
+            lexer: PeekableLexer::lexer(source),
             ast: Vec::new(),
         }
     }
@@ -50,6 +50,7 @@ impl<'a> Parser<'a> {
         let start = self.lexer.span().start;
 
         match token {
+            Token::Identifier(_) => self.parse_ops(token),
             // Control flow
             Token::If => self.if_cond(),
 
@@ -110,13 +111,14 @@ impl<'a> Parser<'a> {
     fn function_declaration(&mut self) -> Result<Expr, Error> {
         let iden = self.require_iden()?;
         self.require(Token::LeftParenthesis)?;
+        let args = vec![];
         self.require(Token::RightParenthesis)?;
         self.require(Token::LeftBrace)?;
 
         // Parse function body
         let body = self.parse_body()?;
 
-        Ok(Expr::FunctionDeclaration { iden, body })
+        Ok(Expr::FunctionDeclaration { iden, args, body })
     }
 
     /// Declare BF FFI Function
diff --git a/src/parser/ops.rs b/src/parser/ops.rs
new file mode 100644
index 00000000..2b76e68f
--- /dev/null
+++ b/src/parser/ops.rs
@@ -0,0 +1,30 @@
+use std::collections::HashMap;
+
+use super::*;
+
+impl<'a> Parser<'a> {
+    pub(super) fn parse_ops(&mut self, token: Token) -> Result<Expr, Error> {
+        let iden = if let Token::Identifier(i) = token {
+            Iden(i)
+        } else {
+            unimplemented!()
+        };
+
+        let mut buf = Vec::new();
+
+        buf.push(match self.lexer.peek() {
+            Some(Token::LeftParenthesis) => self.fn_call(iden)?,
+            _ => unimplemented!(),
+        });
+
+        Ok(buf[0].clone())
+    }
+
+    fn fn_call(&mut self, iden: Iden) -> Result<Expr, Error> {
+        self.require(Token::RightParenthesis)?;
+        Ok(Expr::FunctionCall {
+            iden,
+            args: HashMap::new(),
+        })
+    }
+}
diff --git a/src/parser/utils.rs b/src/parser/utils.rs
index 42c2b63a..49c8b346 100644
--- a/src/parser/utils.rs
+++ b/src/parser/utils.rs
@@ -1,5 +1,5 @@
 use crate::error::{Error, ErrorKind};
-use crate::tokens::Token;
+use crate::lexer::Token;
 use crate::variables::Abool;
 
 use super::{item::Expr, Parser};
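
For reference, the one-token lookahead that `PeekableLexer` adds on top of the logos `Lexer` is the same caching idea as `std::iter::Peekable`: `peek` pulls one item from the underlying iterator and stashes it so the caller can branch on it before committing, which is what `Parser::parse_ops` does before dispatching to `fn_call`. The snippet below is a minimal, standalone sketch of that pattern over a plain iterator; the `Peeking` type and the `next_item` method are invented here for illustration and are not part of this patch.

// Hypothetical standalone sketch (not part of this patch) of the
// peek-and-cache pattern that `PeekableLexer` applies to the logos lexer.
struct Peeking<I: Iterator> {
    inner: I,
    /// One item of lookahead, filled lazily by `peek`.
    peeked: Option<Option<I::Item>>,
}

impl<I: Iterator> Peeking<I> {
    fn new(inner: I) -> Self {
        Self { inner, peeked: None }
    }

    /// Look at the next item without logically consuming it.
    fn peek(&mut self) -> &Option<I::Item> {
        if self.peeked.is_none() {
            self.peeked = Some(self.inner.next());
        }
        self.peeked.as_ref().unwrap()
    }

    /// Drain the cached item first, then fall back to the inner iterator.
    fn next_item(&mut self) -> Option<I::Item> {
        match self.peeked.take() {
            Some(cached) => cached,
            None => self.inner.next(),
        }
    }
}

fn main() {
    // Stand-in token stream for the new test input `a()`.
    let mut tokens = Peeking::new(["a", "(", ")"].into_iter());

    // Lookahead: inspect the next token without consuming it...
    assert_eq!(tokens.peek(), &Some("a"));
    // ...then consume normally; the peeked token is not lost.
    assert_eq!(tokens.next_item(), Some("a"));
    assert_eq!(tokens.next_item(), Some("("));
    assert_eq!(tokens.next_item(), Some(")"));
    assert_eq!(tokens.next_item(), None);
}

Note that the sketch's `next_item` drains the cache first, the way `std::iter::Peekable` does, whereas the patch's `Iterator::next` forwards straight to the inner logos lexer; the new `fn_call` appears to rely on that by requiring only the closing `RightParenthesis` after `parse_ops` has already peeked the opening one.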