make expression

master
nothendev 2023-05-04 20:31:20 +03:00
parent 1460d09f95
commit 1731cf0172
6 changed files with 95 additions and 17 deletions

View File

@@ -3,3 +3,9 @@ Use core.Int;
Constant Hi = "WHY???/\n";
Alias Yo = Byte;
Constant Version = Make Structure Version {
major: 1,
minor: 0,
patch: 0
};

View File

@@ -1,5 +1,7 @@
//! **note** the order of fields is the order of parsing.
use std::collections::HashMap;
/// An IDL module.
///
/// Parsing order:
@@ -15,7 +17,7 @@ pub struct IDLModule {
#[derive(Debug)]
pub enum Item {
Interface(ItemInterface),
_Interface(ItemInterface),
Alias(ItemAlias),
Constant(ItemConstant),
}
@@ -23,8 +25,13 @@ pub enum Item {
#[derive(Debug)]
pub struct Function {
pub name: String,
pub takes: Vec<Type>,
pub returns: Type
}
// why
pub type Type = String;
#[derive(Debug)]
pub struct ItemInterface {
pub name: String,
@@ -51,6 +58,19 @@ pub struct UseDecl {
#[derive(Debug)]
pub enum Expr {
Literal(Literal),
_IdentAccess(String),
Make(ExprMake)
}
#[derive(Debug)]
pub enum ExprMake {
Structure(Box<MakeStructure>)
}
#[derive(Debug)]
pub struct MakeStructure {
pub name: String,
pub params: HashMap<String, Expr>
}
#[derive(Debug)]

View File

@@ -1,6 +1,6 @@
use std::{
fmt::Display,
ops::{Add, Range},
ops::{Add, Range, AddAssign},
};
use logos::Logos;
@@ -9,10 +9,10 @@ use logos::Logos;
#[logos(skip r"[ \t\n\f]+")]
pub enum Token {
#[token("{")]
LeftBrace,
LeftCurly,
#[token("}")]
RightBrace,
RightCurly,
#[token("(")]
LeftParen,
@@ -42,7 +42,7 @@ pub enum Token {
Dot,
// why
#[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| dbg!(lex.slice()).strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
#[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| lex.slice().strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
StringLiteral(String),
#[regex(r"'.'", |lex| lex.slice().strip_prefix('\'')?.strip_suffix('\'')?.parse().ok())]
@@ -75,6 +75,8 @@ pub enum Ident {
Alias,
#[token("Use")]
Use,
#[token("Make")]
Make,
#[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().parse().ok())]
Other(String),
}
@@ -135,6 +137,11 @@ impl Add for Span {
self.concat(rhs)
}
}
impl AddAssign for Span {
fn add_assign(&mut self, rhs: Self) {
*self = self.clone() + rhs;
}
}
#[derive(Debug, Clone)]
pub struct Spanned<T>(pub T, pub Span);

View File

@@ -1,10 +1,6 @@
#![feature(result_option_inspect)]
#![allow(non_snake_case)]
use lexer::Token;
use logos::Logos;
use parser::Parser;
mod ast;
mod lexer;
mod parser;
@@ -12,10 +8,10 @@ mod parser;
const TEST: &str = include_str!("../assets/why.idl");
fn main() {
let res = Parser::new(TEST).parse();
let res = parser::parse(TEST);
match res {
Ok(ast) => {
dbg!(ast);
println!("{:?}", ast);
}
Err(e) => println!("{}", e),
}

View File

@@ -1,6 +1,8 @@
use std::collections::HashMap;
use crate::{
ast::{Expr, Literal, NumberLiteral},
lexer::{NumberSuffix, Spanned, Token},
ast::{Expr, ExprMake, Literal, MakeStructure, NumberLiteral},
lexer::{Ident, NumberSuffix, Spanned, Token},
unwrap_match,
};
@@ -13,6 +15,17 @@ impl<'a> Parser<'a> {
Token::StringLiteral(_) | Token::NumberLiteral(_) | Token::CharLiteral(_) => {
self._ask_literal()?.map(Expr::Literal)
}
Token::Ident(Ident::Make) => {
self.eat();
match self.tokens.next()?.0 {
Token::Ident(Ident::Structure) => self
._ask_struct_init()?
.map(Box::new)
.map(ExprMake::Structure)
.map(Expr::Make),
_ => return Err(self.expected("a Make expression")),
}
}
_ => return Err(self.expected("an expression")),
})
}
@@ -24,7 +37,7 @@ impl<'a> Parser<'a> {
Token::CharLiteral(chr) => Spanned(Literal::Char(chr), span),
Token::NumberLiteral(number) => {
let lit = if let Spanned(Token::NumberSuffix(_), sp) = self.tokens.peek()? {
span = span + sp;
span += sp;
use NumberLiteral::*;
Literal::Number(
@@ -36,7 +49,11 @@ impl<'a> Parser<'a> {
NumberSuffix::I8 => I8(number as i8),
NumberSuffix::U16 => U16(number as u16),
NumberSuffix::I16 => I16(number as i16),
_ => todo!(),
NumberSuffix::U32 => U32(number as u32),
NumberSuffix::I32 => I32(number as i32),
NumberSuffix::U64 => U64(number as u64),
NumberSuffix::I64 => I64(number),
_ => return Err(self.expected("a non-floating number suffix"))
},
)
} else {
@@ -48,4 +65,36 @@ impl<'a> Parser<'a> {
_ => return Err(self.expected("a literal")),
})
}
fn _ask_struct_init(&mut self) -> Result<Spanned<MakeStructure>, ParserError> {
let Spanned(name, nSp) = self.ask_ident()?;
let Spanned(_, _) = self.get_real(
|token| matches!(token, Token::LeftCurly),
"an opening curly brace (`{`)",
)?;
let mut params = HashMap::<String, Expr>::new();
loop {
match self.tokens.peek()?.0 {
Token::Ident(_) => {
let Spanned(ident, _) = self.ask_ident().unwrap();
self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
let Spanned(value, _) = self.ask_expr()?;
params.insert(ident, value);
if let Token::Comma = self.tokens.peek()?.0 {
self.eat();
};
}
Token::RightCurly => break,
_ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
}
}
if let Spanned(Token::RightCurly, ccSp) = self.tokens.next()? {
return Ok(Spanned(MakeStructure { name, params }, nSp + ccSp));
};
Err(self.expected("something"))
}
}

View File

@@ -34,7 +34,7 @@ impl<'a> TokenIterator<'a> {
None => self.lexer.find(token_is_not_comment).and_then(Result::ok),
};
let nxt = n.map(|token| Spanned(token, Span(self.lexer.span())));
println!("[NEXT] {:#?}", nxt);
// println!("[NEXT] {:#?}", nxt);
nxt.ok_or(ParserError::UnexpectedEOF)
}
@@ -47,7 +47,7 @@ impl<'a> TokenIterator<'a> {
pub fn peek(&mut self) -> Result<Spanned<&Token>, ParserError> {
let span = Span(self.lexer.span());
let peek = self._peek().map(|token| Spanned(token, span));
println!("[PEEK] {:#?}", peek);
// println!("[PEEK] {:#?}", peek);
peek.ok_or(ParserError::UnexpectedEOF)
}