Added Peekable Lexer

- Added a wrapper around `Lexer` with a `peek()` method (a usage sketch follows the commit metadata below)
- Renamed the `tokens` module to `lexer`, as that better describes its function
- Started work on operator flow (see the new `src/parser/ops.rs` below, and the dispatch sketch after it)
Erin 2021-04-28 22:52:19 +02:00 committed by ondra05
parent f3779deeb5
commit e45afeac5e
7 changed files with 104 additions and 12 deletions
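The `peek()` wrapper added here caches one token pulled from the underlying lexer so the parser can look ahead without consuming it. Below is a minimal standalone sketch of that caching pattern over a plain iterator; the names (`Peeker`, `next_item`) are illustrative only and are not the crate's actual `PeekableLexer`, which wraps a `logos::Lexer` as shown in the diff further down.

// Minimal sketch of the peek-caching pattern, written against a generic
// iterator so it compiles on its own; names here are illustrative only.
struct Peeker<I: Iterator> {
    inner: I,
    peeked: Option<Option<I::Item>>,
}

impl<I: Iterator> Peeker<I> {
    fn new(inner: I) -> Self {
        Self { inner, peeked: None }
    }

    /// Look at the next item without consuming it.
    fn peek(&mut self) -> &Option<I::Item> {
        if self.peeked.is_none() {
            self.peeked = Some(self.inner.next());
        }
        self.peeked.as_ref().unwrap()
    }

    /// Yield the cached item first, otherwise pull a fresh one.
    fn next_item(&mut self) -> Option<I::Item> {
        match self.peeked.take() {
            Some(item) => item,
            None => self.inner.next(),
        }
    }
}

fn main() {
    let mut tokens = Peeker::new(["a", "(", ")"].into_iter());
    assert_eq!(tokens.peek(), &Some("a"));      // look ahead
    assert_eq!(tokens.next_item(), Some("a"));  // the peeked item is not lost
    assert_eq!(tokens.next_item(), Some("("));
}

A dedicated wrapper is used rather than `std::iter::Peekable`, presumably because the parser still needs direct access to lexer-specific methods such as `span()` and `slice()`, which a generic iterator adaptor would hide.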


@@ -1,3 +1 @@
-if (true) { a()
-var a = 3;
-}


@@ -1,7 +1,62 @@
-use logos::{Lexer, Logos};
+use logos::{Lexer, Logos, Span};
 use crate::variables::Abool;
 
+pub struct PeekableLexer<'source> {
+    lexer: Lexer<'source, Token>,
+    peeked: Option<Option<Token>>,
+}
+
+impl<'source> PeekableLexer<'source> {
+    pub fn lexer(source: &'source str) -> Self {
+        Self {
+            lexer: Token::lexer(source),
+            peeked: None,
+        }
+    }
+
+    /// Returns a reference to the next() value without advancing the iterator.
+    #[inline]
+    pub fn peek(&mut self) -> &Option<Token> {
+        if self.peeked.is_none() {
+            self.peeked = Some(self.lexer.next());
+        }
+        self.peeked.as_ref().unwrap()
+    }
+
+    /// Get the range for the current token in `Source`.
+    #[inline]
+    pub fn span(&self) -> Span {
+        self.lexer.span()
+    }
+
+    /// Get a string slice of the current token.
+    #[inline]
+    pub fn slice(&self) -> &'source str {
+        self.lexer.slice()
+    }
+
+    /// Get a slice of remaining source, starting at the end of current token.
+    #[inline]
+    pub fn remainder(&self) -> &'source str {
+        self.lexer.remainder()
+    }
+}
+
+impl<'source> Iterator for PeekableLexer<'source> {
+    type Item = Token;
+
+    /// Advances the iterator and returns the next value.
+    ///
+    /// Returns [`None`] when iteration is finished.
+    /// Individual iterator implementations may choose to resume iteration, and so calling `next()`
+    /// again may or may not eventually start returning [`Some(Item)`] again at some point.
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.lexer.next()
+    }
+}
+
 #[derive(Logos, Debug, PartialEq, Clone)]
 pub enum Token {
     #[token("functio")]


@@ -2,8 +2,8 @@
 mod base_55;
 mod error;
+mod lexer;
 mod parser;
-mod tokens;
 mod variables;
 
 use clap::{App, Arg};


@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use crate::variables::Value;
 
 #[derive(Debug, Clone)]
@@ -11,6 +13,7 @@ pub enum Expr {
     },
     FunctionDeclaration {
         iden: String,
+        args: Vec<Iden>,
         body: Vec<Expr>,
     },
     BfFDeclaration {
@@ -22,6 +25,10 @@ pub enum Expr {
         body: Vec<Expr>,
     },
+    FunctionCall {
+        iden: Iden,
+        args: HashMap<Iden, Value>,
+    },
     Literal(Value),
     Melo(Iden),
 }


@@ -1,19 +1,19 @@
 mod item;
+mod ops;
 mod utils;
 
 use item::Expr;
 
 use crate::{
     error::{Error, ErrorKind},
+    lexer::PeekableLexer,
     variables::Value,
 };
-use crate::{parser::item::Iden, tokens::Token};
-use logos::Logos;
+use crate::{lexer::Token, parser::item::Iden};
 
 /// Parser structure / state machine
 pub struct Parser<'a> {
-    lexer: logos::Lexer<'a, Token>,
+    lexer: PeekableLexer<'a>,
     ast: Vec<Expr>,
 }
@@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
     /// Create a new parser object
     pub fn new(source: &'a str) -> Self {
         Self {
-            lexer: Token::lexer(source),
+            lexer: PeekableLexer::lexer(source),
             ast: Vec::new(),
         }
     }
@@ -50,6 +50,7 @@ impl<'a> Parser<'a> {
         let start = self.lexer.span().start;
 
         match token {
+            Token::Identifier(_) => self.parse_ops(token),
             // Control flow
             Token::If => self.if_cond(),
@@ -110,13 +111,14 @@ impl<'a> Parser<'a> {
     fn function_declaration(&mut self) -> Result<Expr, Error> {
         let iden = self.require_iden()?;
         self.require(Token::LeftParenthesis)?;
+        let args = vec![];
         self.require(Token::RightParenthesis)?;
         self.require(Token::LeftBrace)?;
 
         // Parse function body
         let body = self.parse_body()?;
 
-        Ok(Expr::FunctionDeclaration { iden, body })
+        Ok(Expr::FunctionDeclaration { iden, args, body })
     }
 
     /// Declare BF FFI Function

src/parser/ops.rs (new file, 30 lines)

@@ -0,0 +1,30 @@
+use std::collections::HashMap;
+
+use super::*;
+
+impl<'a> Parser<'a> {
+    pub(super) fn parse_ops(&mut self, token: Token) -> Result<Expr, Error> {
+        let iden = if let Token::Identifier(i) = token {
+            Iden(i)
+        } else {
+            unimplemented!()
+        };
+
+        let mut buf = Vec::new();
+
+        buf.push(match self.lexer.peek() {
+            Some(Token::LeftParenthesis) => self.fn_call(iden)?,
+            _ => unimplemented!(),
+        });
+
+        Ok(buf[0].clone())
+    }
+
+    fn fn_call(&mut self, iden: Iden) -> Result<Expr, Error> {
+        self.require(Token::RightParenthesis)?;
+        Ok(Expr::FunctionCall {
+            iden,
+            args: HashMap::new(),
+        })
+    }
+}
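For a source fragment like `a()`, this path peeks one token past the identifier, sees the left parenthesis, and builds `Expr::FunctionCall { iden: Iden("a"), args: {} }`; every other operator form still hits `unimplemented!()`. Below is a compressed, self-contained sketch of that peek-and-dispatch decision, using stand-in types rather than the crate's real `Token` and `Expr`.

// Standalone sketch of the peek-and-dispatch idea in parse_ops above;
// the enums are stand-ins, not the crate's real Token and Expr types.
#[derive(Debug, PartialEq)]
#[allow(dead_code)]
enum Tok {
    Identifier(String),
    LeftParenthesis,
    RightParenthesis,
}

#[derive(Debug, PartialEq)]
enum Ast {
    FunctionCall { iden: String },
}

fn dispatch(iden: String, peeked: Option<&Tok>) -> Result<Ast, String> {
    match peeked {
        // `name(` begins a function call; other operator forms are not handled yet.
        Some(Tok::LeftParenthesis) => Ok(Ast::FunctionCall { iden }),
        other => Err(format!("operator flow not implemented for {:?}", other)),
    }
}

fn main() {
    let expr = dispatch("a".to_string(), Some(&Tok::LeftParenthesis)).unwrap();
    assert_eq!(expr, Ast::FunctionCall { iden: "a".to_string() });
}

The intermediate `buf` vector in `parse_ops` suggests that chains of further operations on one identifier are planned, but only the single function-call case is wired up in this commit.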


@@ -1,5 +1,5 @@
 use crate::error::{Error, ErrorKind};
-use crate::tokens::Token;
+use crate::lexer::Token;
 use crate::variables::Abool;
 
 use super::{item::Expr, Parser};