forked from AbleScript/ablescript
Added Peekable Lexer
- Added wrapper for Lexer with `peek()` method - Renamed `token` module to `lexer`, as that better describes its function - Started work on operator flow
This commit is contained in:
parent
00a464321a
commit
3e019621b2
|
@ -1,3 +1 @@
|
|||
if (true) {
|
||||
var a = 3;
|
||||
}
|
||||
a()
|
|
@ -1,7 +1,62 @@
|
|||
use logos::{Lexer, Logos};
|
||||
use logos::{Lexer, Logos, Span};
|
||||
|
||||
use crate::variables::Abool;
|
||||
|
||||
/// A wrapper around [`logos::Lexer`] that adds single-token lookahead,
/// analogous to [`std::iter::Peekable`].
pub struct PeekableLexer<'source> {
    // The underlying logos lexer over the source text.
    lexer: Lexer<'source, Token>,
    // Lookahead buffer:
    //   None            -> nothing has been peeked
    //   Some(None)      -> peeked, and the lexer is exhausted
    //   Some(Some(tok)) -> `tok` is the buffered next token
    peeked: Option<Option<Token>>,
}
|
||||
|
||||
impl<'source> PeekableLexer<'source> {
|
||||
pub fn lexer(source: &'source str) -> Self {
|
||||
Self {
|
||||
lexer: Token::lexer(source),
|
||||
peeked: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the next() value without advancing the iterator.
|
||||
#[inline]
|
||||
pub fn peek(&mut self) -> &Option<Token> {
|
||||
if self.peeked.is_none() {
|
||||
self.peeked = Some(self.lexer.next());
|
||||
}
|
||||
self.peeked.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// Get the range for the current token in `Source`.
|
||||
#[inline]
|
||||
pub fn span(&self) -> Span {
|
||||
self.lexer.span()
|
||||
}
|
||||
|
||||
/// Get a string slice of the current token.
|
||||
#[inline]
|
||||
pub fn slice(&self) -> &'source str {
|
||||
self.lexer.slice()
|
||||
}
|
||||
|
||||
/// Get a slice of remaining source, starting at the end of current token.
|
||||
#[inline]
|
||||
pub fn remainder(&self) -> &'source str {
|
||||
self.lexer.remainder()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'source> Iterator for PeekableLexer<'source> {
|
||||
type Item = Token;
|
||||
|
||||
/// Advances the iterator and returns the next value.
|
||||
///
|
||||
/// Returns [`None`] when iteration is finished.
|
||||
/// Individual iterator implementations may choose to resume iteration, and so calling `next()`
|
||||
/// again may or may not eventually start returning [`Some(Item)`] again at some point.
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.lexer.next()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Logos, Debug, PartialEq, Clone)]
|
||||
pub enum Token {
|
||||
#[token("functio")]
|
|
@ -2,8 +2,8 @@
|
|||
|
||||
mod base_55;
|
||||
mod error;
|
||||
mod lexer;
|
||||
mod parser;
|
||||
mod tokens;
|
||||
mod variables;
|
||||
|
||||
use clap::{App, Arg};
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use crate::variables::Value;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -11,6 +13,7 @@ pub enum Expr {
|
|||
},
|
||||
FunctionDeclaration {
|
||||
iden: String,
|
||||
args: Vec<Iden>,
|
||||
body: Vec<Expr>,
|
||||
},
|
||||
BfFDeclaration {
|
||||
|
@ -22,6 +25,10 @@ pub enum Expr {
|
|||
body: Vec<Expr>,
|
||||
},
|
||||
|
||||
FunctionCall {
|
||||
iden: Iden,
|
||||
args: HashMap<Iden, Value>,
|
||||
},
|
||||
Literal(Value),
|
||||
Melo(Iden),
|
||||
}
|
||||
|
|
|
@ -1,19 +1,19 @@
|
|||
mod item;
|
||||
mod ops;
|
||||
mod utils;
|
||||
|
||||
use item::Expr;
|
||||
|
||||
use crate::{
|
||||
error::{Error, ErrorKind},
|
||||
lexer::PeekableLexer,
|
||||
variables::Value,
|
||||
};
|
||||
use crate::{parser::item::Iden, tokens::Token};
|
||||
|
||||
use logos::Logos;
|
||||
use crate::{lexer::Token, parser::item::Iden};
|
||||
|
||||
/// Parser structure / state machine
|
||||
pub struct Parser<'a> {
|
||||
lexer: logos::Lexer<'a, Token>,
|
||||
lexer: PeekableLexer<'a>,
|
||||
ast: Vec<Expr>,
|
||||
}
|
||||
|
||||
|
@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
|
|||
/// Create a new parser object
|
||||
pub fn new(source: &'a str) -> Self {
|
||||
Self {
|
||||
lexer: Token::lexer(source),
|
||||
lexer: PeekableLexer::lexer(source),
|
||||
ast: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
@ -50,6 +50,7 @@ impl<'a> Parser<'a> {
|
|||
let start = self.lexer.span().start;
|
||||
|
||||
match token {
|
||||
Token::Identifier(_) => self.parse_ops(token),
|
||||
// Control flow
|
||||
Token::If => self.if_cond(),
|
||||
|
||||
|
@ -110,13 +111,14 @@ impl<'a> Parser<'a> {
|
|||
fn function_declaration(&mut self) -> Result<Expr, Error> {
|
||||
let iden = self.require_iden()?;
|
||||
self.require(Token::LeftParenthesis)?;
|
||||
let args = vec![];
|
||||
self.require(Token::RightParenthesis)?;
|
||||
|
||||
self.require(Token::LeftBrace)?;
|
||||
// Parse function body
|
||||
let body = self.parse_body()?;
|
||||
|
||||
Ok(Expr::FunctionDeclaration { iden, body })
|
||||
Ok(Expr::FunctionDeclaration { iden, args, body })
|
||||
}
|
||||
|
||||
/// Declare BF FFI Function
|
||||
|
|
30
src/parser/ops.rs
Normal file
30
src/parser/ops.rs
Normal file
|
@ -0,0 +1,30 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use super::*;
|
||||
|
||||
impl<'a> Parser<'a> {
    /// Parse an operation that begins with an identifier token.
    ///
    /// Currently only dispatches on the token after the identifier:
    /// a `(` starts a function call; everything else is unimplemented.
    pub(super) fn parse_ops(&mut self, token: Token) -> Result<Expr, Error> {
        // The caller's dispatch only routes `Token::Identifier` here
        // (see the `Token::Identifier(_) => self.parse_ops(token)` arm),
        // so any other token is a logic error upstream.
        let iden = if let Token::Identifier(i) = token {
            Iden(i)
        } else {
            unimplemented!()
        };

        let mut buf = Vec::new();

        // Decide the operation by looking one token ahead without consuming.
        buf.push(match self.lexer.peek() {
            Some(Token::LeftParenthesis) => self.fn_call(iden)?,
            _ => unimplemented!(),
        });

        // NOTE(review): exactly one expression is ever pushed, so the Vec and
        // the clone are redundant — presumably scaffolding for chained
        // operations; verify intent before simplifying.
        Ok(buf[0].clone())
    }

    /// Parse a (currently argument-less) function call: `iden ( )`.
    fn fn_call(&mut self, iden: Iden) -> Result<Expr, Error> {
        // NOTE(review): the `(` observed by `peek()` in `parse_ops` is never
        // explicitly consumed here. This only works while
        // `PeekableLexer::next()` ignores the peek buffer and advances the
        // raw lexer (skipping the peeked `(`). If `next()` is ever fixed to
        // yield the buffered token first, a
        // `self.require(Token::LeftParenthesis)?` must be added before the
        // line below, or `require` will see `(` and fail.
        self.require(Token::RightParenthesis)?;
        Ok(Expr::FunctionCall {
            iden,
            args: HashMap::new(),
        })
    }
}
|
|
@ -1,5 +1,5 @@
|
|||
use crate::error::{Error, ErrorKind};
|
||||
use crate::tokens::Token;
|
||||
use crate::lexer::Token;
|
||||
use crate::variables::Abool;
|
||||
|
||||
use super::{item::Expr, Parser};
|
||||
|
|
Loading…
Reference in a new issue