make expression

commit 7e954ca279
parent d89faedfb1
@@ -3,3 +3,9 @@ Use core.Int;
 Constant Hi = "WHY???/\n";
 Alias Yo = Byte;
+
+Constant Version = Make Structure Version {
+    major: 1,
+    minor: 0,
+    patch: 0
+};
 
@@ -1,5 +1,7 @@
 //! **note** the order of fields is the order of parsing.
 
+use std::collections::HashMap;
+
 /// An IDL module.
 ///
 /// Parsing order:
@@ -15,7 +17,7 @@ pub struct IDLModule {
 
 #[derive(Debug)]
 pub enum Item {
-    Interface(ItemInterface),
+    _Interface(ItemInterface),
     Alias(ItemAlias),
     Constant(ItemConstant),
 }
@@ -23,8 +25,13 @@ pub enum Item {
 #[derive(Debug)]
 pub struct Function {
     pub name: String,
+    pub takes: Vec<Type>,
+    pub returns: Type
 }
 
+// why
+pub type Type = String;
+
 #[derive(Debug)]
 pub struct ItemInterface {
     pub name: String,
@@ -51,6 +58,19 @@ pub struct UseDecl {
 #[derive(Debug)]
 pub enum Expr {
     Literal(Literal),
+    _IdentAccess(String),
+    Make(ExprMake)
+}
+
+#[derive(Debug)]
+pub enum ExprMake {
+    Structure(Box<MakeStructure>)
+}
+
+#[derive(Debug)]
+pub struct MakeStructure {
+    pub name: String,
+    pub params: HashMap<String, Expr>
 }
 
 #[derive(Debug)]
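For orientation (not part of the diff): with the new `Expr::Make` / `ExprMake::Structure` / `MakeStructure` types, the `Constant Version = Make Structure Version { ... }` literal added to the IDL sample above would come out roughly like the sketch below. The use of `NumberLiteral::U32` for the unsuffixed integers is an assumption; the no-suffix default lives outside this diff.

```rust
// Sketch only: building the AST value for `Make Structure Version { ... }`
// by hand. NumberLiteral::U32 for the unsuffixed 1/0 is assumed.
use std::collections::HashMap;

use crate::ast::{Expr, ExprMake, Literal, MakeStructure, NumberLiteral};

fn version_expr() -> Expr {
    let mut params = HashMap::<String, Expr>::new();
    params.insert(
        "major".to_owned(),
        Expr::Literal(Literal::Number(NumberLiteral::U32(1))),
    );
    params.insert(
        "minor".to_owned(),
        Expr::Literal(Literal::Number(NumberLiteral::U32(0))),
    );
    params.insert(
        "patch".to_owned(),
        Expr::Literal(Literal::Number(NumberLiteral::U32(0))),
    );

    Expr::Make(ExprMake::Structure(Box::new(MakeStructure {
        name: "Version".to_owned(),
        params,
    })))
}
```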
@@ -1,6 +1,6 @@
 use std::{
     fmt::Display,
-    ops::{Add, Range},
+    ops::{Add, Range, AddAssign},
 };
 
 use logos::Logos;
@@ -9,10 +9,10 @@ use logos::Logos;
 #[logos(skip r"[ \t\n\f]+")]
 pub enum Token {
     #[token("{")]
-    LeftBrace,
+    LeftCurly,
 
     #[token("}")]
-    RightBrace,
+    RightCurly,
 
     #[token("(")]
     LeftParen,
@@ -42,7 +42,7 @@ pub enum Token {
     Dot,
 
     // why
-    #[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| dbg!(lex.slice()).strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
+    #[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| lex.slice().strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
     StringLiteral(String),
 
     #[regex(r"'.'", |lex| lex.slice().strip_prefix('\'')?.strip_suffix('\'')?.parse().ok())]
@@ -75,6 +75,8 @@ pub enum Ident {
     Alias,
     #[token("Use")]
     Use,
+    #[token("Make")]
+    Make,
     #[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().parse().ok())]
     Other(String),
 }
@@ -135,6 +137,11 @@ impl Add for Span {
         self.concat(rhs)
     }
 }
+impl AddAssign for Span {
+    fn add_assign(&mut self, rhs: Self) {
+        *self = self.clone() + rhs;
+    }
+}
 
 #[derive(Debug, Clone)]
 pub struct Spanned<T>(pub T, pub Span);
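The `AddAssign` impl just delegates to the existing `Add` (which calls `self.concat(rhs)`); it exists so the parser change further down can write `span += sp;`. Below is a self-contained sketch of the same delegation pattern, with made-up union-of-ranges semantics standing in for `concat`:

```rust
// The pattern in isolation: AddAssign delegating to an existing Add impl by
// cloning, so `a += b` works wherever `a = a.clone() + b` already did.
use std::ops::{Add, AddAssign};

#[derive(Clone, Debug, PartialEq)]
struct Span(std::ops::Range<usize>);

impl Add for Span {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        // Assumed semantics: the combined span covers both inputs.
        Span(self.0.start.min(rhs.0.start)..self.0.end.max(rhs.0.end))
    }
}

impl AddAssign for Span {
    fn add_assign(&mut self, rhs: Self) {
        *self = self.clone() + rhs;
    }
}

fn main() {
    let mut a = Span(0..3);
    a += Span(2..7);
    assert_eq!(a, Span(0..7));
}
```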
@@ -1,10 +1,6 @@
 #![feature(result_option_inspect)]
 #![allow(non_snake_case)]
 
-use lexer::Token;
-use logos::Logos;
-use parser::Parser;
-
 mod ast;
 mod lexer;
 mod parser;
@@ -12,10 +8,10 @@ mod parser;
 const TEST: &str = include_str!("../assets/why.idl");
 
 fn main() {
-    let res = Parser::new(TEST).parse();
+    let res = parser::parse(TEST);
     match res {
         Ok(ast) => {
-            dbg!(ast);
+            println!("{:?}", ast);
         }
         Err(e) => println!("{}", e),
     }
@@ -1,6 +1,8 @@
+use std::collections::HashMap;
+
 use crate::{
-    ast::{Expr, Literal, NumberLiteral},
-    lexer::{NumberSuffix, Spanned, Token},
+    ast::{Expr, ExprMake, Literal, MakeStructure, NumberLiteral},
+    lexer::{Ident, NumberSuffix, Spanned, Token},
     unwrap_match,
 };
 
@@ -13,6 +15,17 @@ impl<'a> Parser<'a> {
             Token::StringLiteral(_) | Token::NumberLiteral(_) | Token::CharLiteral(_) => {
                 self._ask_literal()?.map(Expr::Literal)
             }
+            Token::Ident(Ident::Make) => {
+                self.eat();
+                match self.tokens.next()?.0 {
+                    Token::Ident(Ident::Structure) => self
+                        ._ask_struct_init()?
+                        .map(Box::new)
+                        .map(ExprMake::Structure)
+                        .map(Expr::Make),
+                    _ => return Err(self.expected("a Make expression")),
+                }
+            }
             _ => return Err(self.expected("an expression")),
         })
     }
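Roughly what the new `ask_expr` arm walks through, sketched with the lexer types from this diff (the exact token stream, in particular `Ident::Other` for user identifiers, is inferred from the definitions above rather than taken from a real run):

```rust
// Sketch: tokenize a Make expression with the crate's own lexer. Comment
// filtering and error handling are simplified compared to TokenIterator.
use logos::Logos;

use crate::lexer::Token;

fn dump_make_tokens() {
    let tokens: Vec<Token> = Token::lexer("Make Structure Version { major: 1 }")
        .filter_map(Result::ok)
        .collect();
    // Expected shape (approximately): Ident(Make), Ident(Structure),
    // Ident(Other("Version")), LeftCurly, Ident(Other("major")), Colon,
    // NumberLiteral(1), RightCurly
    println!("{:?}", tokens);
}
```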
@@ -24,7 +37,7 @@ impl<'a> Parser<'a> {
             Token::CharLiteral(chr) => Spanned(Literal::Char(chr), span),
             Token::NumberLiteral(number) => {
                 let lit = if let Spanned(Token::NumberSuffix(_), sp) = self.tokens.peek()? {
-                    span = span + sp;
+                    span += sp;
 
                     use NumberLiteral::*;
                     Literal::Number(
@@ -36,7 +49,11 @@ impl<'a> Parser<'a> {
                             NumberSuffix::I8 => I8(number as i8),
                             NumberSuffix::U16 => U16(number as u16),
                             NumberSuffix::I16 => I16(number as i16),
-                            _ => todo!(),
+                            NumberSuffix::U32 => U32(number as u32),
+                            NumberSuffix::I32 => I32(number as i32),
+                            NumberSuffix::U64 => U64(number as u64),
+                            NumberSuffix::I64 => I64(number),
+                            _ => return Err(self.expected("a non-floating number suffix"))
                         },
                     )
                 } else {
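Worth noting (not a change made by this commit): the `number as u16` style conversions in the new arms wrap out-of-range values rather than rejecting them. A minimal illustration, with `u16::try_from` shown as the checked alternative:

```rust
// `as` casts truncate/wrap; TryFrom reports the overflow instead.
fn main() {
    let number: i64 = 70_000;
    assert_eq!(number as u16, 4_464); // 70_000 - 65_536
    assert!(u16::try_from(number).is_err());
}
```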
@@ -48,4 +65,36 @@ impl<'a> Parser<'a> {
             _ => return Err(self.expected("a literal")),
         })
     }
+
+    fn _ask_struct_init(&mut self) -> Result<Spanned<MakeStructure>, ParserError> {
+        let Spanned(name, nSp) = self.ask_ident()?;
+        let Spanned(_, _) = self.get_real(
+            |token| matches!(token, Token::LeftCurly),
+            "an opening curly brace (`{`)",
+        )?;
+
+        let mut params = HashMap::<String, Expr>::new();
+
+        loop {
+            match self.tokens.peek()?.0 {
+                Token::Ident(_) => {
+                    let Spanned(ident, _) = self.ask_ident().unwrap();
+                    self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
+                    let Spanned(value, _) = self.ask_expr()?;
+                    params.insert(ident, value);
+                    if let Token::Comma = self.tokens.peek()?.0 {
+                        self.eat();
+                    };
+                }
+                Token::RightCurly => break,
+                _ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
+            }
+        }
+
+        if let Spanned(Token::RightCurly, ccSp) = self.tokens.next()? {
+            return Ok(Spanned(MakeStructure { name, params }, nSp + ccSp));
+        };
+
+        Err(self.expected("something"))
+    }
 }
@@ -34,7 +34,7 @@ impl<'a> TokenIterator<'a> {
             None => self.lexer.find(token_is_not_comment).and_then(Result::ok),
         };
         let nxt = n.map(|token| Spanned(token, Span(self.lexer.span())));
-        println!("[NEXT] {:#?}", nxt);
+        // println!("[NEXT] {:#?}", nxt);
         nxt.ok_or(ParserError::UnexpectedEOF)
     }
 
@@ -47,7 +47,7 @@ impl<'a> TokenIterator<'a> {
     pub fn peek(&mut self) -> Result<Spanned<&Token>, ParserError> {
         let span = Span(self.lexer.span());
         let peek = self._peek().map(|token| Spanned(token, span));
-        println!("[PEEK] {:#?}", peek);
+        // println!("[PEEK] {:#?}", peek);
        peek.ok_or(ParserError::UnexpectedEOF)
     }
 