Starting work on parser improvements

- Parser should parse single expressions
This commit is contained in:
Erin 2021-04-26 10:44:42 +02:00 committed by ondra05
parent 31c9fb3203
commit 47400ee2ce
5 changed files with 66 additions and 6 deletions

View file

@@ -8,7 +8,8 @@ pub struct Error {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ErrorKind { pub enum ErrorKind {
SyntaxError, SyntaxError(String),
EndOfTokenStream,
} }
impl Error { impl Error {

View file

@@ -32,7 +32,7 @@ fn main() {
// Parse // Parse
let mut parser = Parser::new(&source); let mut parser = Parser::new(&source);
let ast = parser.parse(); let ast = parser.init();
println!("{:#?}", ast); println!("{:#?}", ast);
} }
None => { None => {

View file

@@ -11,6 +11,7 @@ use logos::Logos;
/// Parser structure / state machine /// Parser structure / state machine
pub struct Parser<'a> { pub struct Parser<'a> {
lexer: logos::Lexer<'a, Token>, lexer: logos::Lexer<'a, Token>,
ast: Vec<Expr>,
} }
impl<'a> Parser<'a> { impl<'a> Parser<'a> {
@@ -18,9 +19,33 @@ impl<'a> Parser<'a> {
pub fn new(source: &'a str) -> Self { pub fn new(source: &'a str) -> Self {
Self { Self {
lexer: Token::lexer(source), lexer: Token::lexer(source),
ast: Vec::new(),
} }
} }
/// Entry point of the parser: drain the lexer, turning each token
/// into an expression via `parse_expr`, accumulating into `self.ast`.
///
/// Returns a clone of the accumulated AST once the token stream is
/// exhausted, or the first parse error encountered.
pub fn init(&mut self) -> Result<Vec<Expr>, Error> {
    // `lexer.next()` yields `None` when the source is fully consumed.
    while let Some(token) = self.lexer.next() {
        let expr = self.parse_expr(&Some(token))?;
        self.ast.push(expr);
    }
    Ok(self.ast.clone())
}
fn parse_expr(&mut self, token: &Option<Token>) -> Result<Expr, Error> {
if matches!(token, None) {
return Err(Error {
kind: ErrorKind::EndOfTokenStream,
position: self.lexer.span(),
});
}
Ok(todo!())
}
/*
/// Start parsing Token Vector into Abstract Syntax Tree /// Start parsing Token Vector into Abstract Syntax Tree
pub fn parse(&mut self) -> Vec<Expr> { pub fn parse(&mut self) -> Vec<Expr> {
let mut ast = vec![]; let mut ast = vec![];
@@ -46,6 +71,7 @@ impl<'a> Parser<'a> {
ast ast
} }
*/
/// Parse variable declaration /// Parse variable declaration
/// ///
@@ -62,7 +88,7 @@ impl<'a> Parser<'a> {
} }
_ => { _ => {
return Err(Error { return Err(Error {
kind: ErrorKind::SyntaxError, kind: ErrorKind::SyntaxError("Unexpected token".to_owned()),
position: self.lexer.span(), position: self.lexer.span(),
}) })
} }
@@ -80,7 +106,7 @@ impl<'a> Parser<'a> {
// TODO: Arguments // TODO: Arguments
self.require(Token::RightParenthesis)?; self.require(Token::RightParenthesis)?;
self.require(Token::LeftBrace)?; self.require(Token::LeftBrace)?;
let body = self.parse(); let body = vec![];
Ok(Expr::FunctionDeclaration { iden, body }) Ok(Expr::FunctionDeclaration { iden, body })
} }
@@ -91,7 +117,21 @@ impl<'a> Parser<'a> {
fn bff_declaration(&mut self) -> Result<Expr, Error> { fn bff_declaration(&mut self) -> Result<Expr, Error> {
let iden = self.require(Token::Identifier)?; let iden = self.require(Token::Identifier)?;
self.require(Token::LeftBrace)?; self.require(Token::LeftBrace)?;
let code = self.require(Token::String)?; // <-- Nasty hack, but works let mut code = String::new();
while let Some(token) = self.lexer.next() {
code.push_str(match token {
Token::OpGt
| Token::OpLt
| Token::Addition
| Token::Subtract
| Token::FullStop
| Token::Comma
| Token::LeftBracket
| Token::RightBracket => self.lexer.slice(),
Token::RightBrace => break,
_ => break,
});
}
self.require(Token::RightBrace)?; self.require(Token::RightBrace)?;
Ok(Expr::BfFDeclaration { iden, code }) Ok(Expr::BfFDeclaration { iden, code })
} }

View file

@@ -27,7 +27,7 @@ impl<'a> Parser<'a> {
Ok(self.lexer.slice().to_owned()) Ok(self.lexer.slice().to_owned())
} else { } else {
Err(Error { Err(Error {
kind: ErrorKind::SyntaxError, kind: ErrorKind::SyntaxError("Mysterious parse error".to_owned()),
position: self.lexer.span(), position: self.lexer.span(),
}) })
} }

View file

@@ -44,6 +44,12 @@ pub enum Token {
#[token(";")] #[token(";")]
Semicolon, Semicolon,
#[token(".")]
FullStop,
#[token(",")]
Comma,
#[regex(r"#.*")] #[regex(r"#.*")]
Comment, Comment,
@@ -63,6 +69,19 @@ pub enum Token {
#[token("=")] #[token("=")]
Assignment, Assignment,
// Logical operators
#[token("<")]
OpLt,
#[token(">")]
OpGt,
#[token("==")]
OpEq,
#[token("!=")]
OpNeq,
/// Base52 based character ('a') /// Base52 based character ('a')
#[token("'.*'")] #[token("'.*'")]
Char, Char,