aidl commit almost mvp

This commit is contained in:
nothendev 2023-05-04 14:19:32 +03:00
parent 6a94aa3e84
commit b0020ff838
7 changed files with 329 additions and 60 deletions

View file

@ -1,5 +1,4 @@
// core provides lots of useful types like String and Byte Use core;
use core;
Constant VERSION Version{ Constant VERSION Version{
major: 1, major: 1,

43
programs/aidl/src/ast.rs Normal file
View file

@ -0,0 +1,43 @@
//! **note** the order of fields is the order of parsing.

/// An IDL module.
///
/// Parsing order:
/// - use declarations,
/// - items
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IDLModule {
    // why: only allow use before other items
    // parser will error if use is present in any other place
    pub uses: Vec<UseDecl>,
    pub items: Vec<Item>,
}

/// A top-level item: an interface declaration or a type alias.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Item {
    Interface(ItemInterface),
    Type(ItemType),
}

/// A function declared inside an interface.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Function {
    pub name: String,
}

/// An `Interface <name>;` item (function bodies are not parsed yet).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ItemInterface {
    pub name: String,
    pub functions: Vec<Function>,
}

/// A `Type <name> = <referree>;` alias item.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ItemType {
    pub name: String,
    // NOTE(review): "referree" looks like a typo for "referent"; field name
    // kept unchanged for compatibility with the parser.
    pub referree: String,
}

/// A `Use <module>;` declaration.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct UseDecl {
    pub module: String,
}

111
programs/aidl/src/lexer.rs Normal file
View file

@ -0,0 +1,111 @@
use std::ops::{Range, Add};
use logos::Logos;
/// Tokens of the IDL surface syntax.
#[derive(Logos, Debug, PartialEq)]
#[logos(skip r"[ \t\n\f]+")] // whitespace is insignificant between tokens
pub enum Token {
    #[token("{")]
    LeftBrace,
    #[token("}")]
    RightBrace,
    #[token("(")]
    LeftParen,
    #[token(")")]
    RightParen,
    #[token(";")]
    Semicolon,
    #[token(":")]
    Colon,
    #[token("<")]
    LeftArrow,
    #[token(">")]
    RightArrow,
    #[token(",")]
    Comma,
    #[token("=")]
    Equals,
    /// An identifier, sub-lexed into keyword-or-other via [`Ident`].
    // FIX: the class `[A-z]` also matches `[ \ ] ^ _` and the backtick
    // (ASCII 91-96 sit between 'Z' and 'a'); use explicit `[a-zA-Z]`.
    #[regex(r#"[a-zA-Z]+"#, |lex| Ident::lexer(lex.slice()).next().and_then(Result::ok))]
    Ident(Ident),
    /// A whole `use path;` declaration captured as a single token.
    #[regex("use [a-zA-Z/]+;", |lex| lex.slice().parse().ok())]
    Component(String),
    /// Unsigned integer type names such as `U8`, `U32`.
    #[regex("U[0-9]+", |lex| lex.slice().parse().ok())]
    UnsignedType(String),
    /// Signed integer type names such as `I8`, `I32`.
    #[regex("I[0-9]+", |lex| lex.slice().parse().ok())]
    SignedType(String),
    /// A `//` line comment (filtered out before parsing).
    #[regex(r"//.*", |lex| lex.slice().parse().ok())]
    Comment(String),
}
/// Identifier tokens: the known keywords plus a catch-all `Other`.
#[derive(Logos, Debug, PartialEq, Eq)]
pub enum Ident {
    #[token("Interface")]
    Interface,
    #[token("Function")]
    Function,
    #[token("Constant")]
    Constant,
    #[token("Structure")]
    Structure,
    #[token("Type")]
    Type,
    #[token("Use")]
    Use,
    /// Any non-keyword identifier (a user-defined name).
    // FIX: `[A-z]` also matches `[ \ ] ^ _` and the backtick
    // (ASCII 91-96); use explicit `[a-zA-Z]`.
    #[regex(r"[a-zA-Z]+", |lex| lex.slice().parse().ok())]
    Other(String),
}
/// A byte range into the source text.
#[derive(Debug, Clone)]
pub struct Span(pub Range<usize>);

impl Span {
    /// The empty span at offset 0, used as a neutral placeholder.
    pub const ZERO: Self = Self(0..0);

    /// Start offset (inclusive).
    pub fn lower(&self) -> usize {
        self.0.start
    }

    /// End offset (exclusive).
    pub fn upper(&self) -> usize {
        self.0.end
    }

    /// The smallest span covering both `self` and `other`.
    pub fn concat(self, other: Span) -> Self {
        use std::cmp::{max, min};
        Self(min(self.lower(), other.lower())..max(self.upper(), other.upper()))
    }
}

impl Add for Span {
    type Output = Self;

    /// `a + b` is shorthand for [`Span::concat`].
    fn add(self, rhs: Self) -> Self::Output {
        self.concat(rhs)
    }
}

/// A value paired with the source span it was parsed from.
#[derive(Debug, Clone)]
pub struct Spanned<T>(pub T, pub Span);

impl<T> Spanned<T> {
    /// Builds a `Spanned` whose span covers all of `spans`.
    ///
    /// FIX: the previous implementation folded starting from `Span::ZERO`,
    /// so `min(0, ..)` forced every resulting span to start at offset 0
    /// regardless of the inputs. `reduce` combines only the given spans,
    /// falling back to `Span::ZERO` when `N == 0`.
    pub fn new<const N: usize>(thing: T, spans: [Span; N]) -> Self {
        Self(
            thing,
            spans
                .into_iter()
                .reduce(Span::concat)
                .unwrap_or(Span::ZERO),
        )
    }

    /// Maps the inner value, keeping the span unchanged.
    pub fn map<R>(self, f: impl Fn(T) -> R) -> Spanned<R> {
        Spanned(f(self.0), self.1)
    }
}

View file

@ -1,62 +1,26 @@
#![allow(non_snake_case)]
use logos::Logos; use logos::Logos;
use parser::Parser;
#[derive(Logos, Debug, PartialEq)] use crate::lexer::Token;
#[logos(skip r"[ \t\n\f]+")] // Ignore this regex pattern between tokens mod ast;
enum Token { mod lexer;
#[token("{")] mod parser;
LeftBrace,
#[token("}")] const VFS: &str = include_str!("../assets/vfs.idl");
RightBrace,
#[token("(")]
LeftParen,
#[token(")")]
RightParen,
#[token(";")]
Semicolon,
#[token(":")]
Colon,
#[token("<")]
LeftArrow,
#[token(">")]
RightArrow,
#[token(",")]
Comma,
#[token("=")]
Equals,
#[regex(r#"[A-z]+"#, |lex| lex.slice().parse().ok())]
Literal(String),
#[regex("use [a-zA-Z/]+;", |lex| lex.slice().parse().ok())]
Component(String),
#[regex("U[0-9]+", |lex| lex.slice().parse().ok())]
UnsignedType(String),
#[regex("I[0-9]+", |lex| lex.slice().parse().ok())]
SignedType(String),
#[regex(r"//[ a-zA-Z!-+]+", |lex| lex.slice().parse().ok())]
Comment(String),
}
fn main() { fn main() {
let mut lex = Token::lexer(include_str!("../../../programs/aidl/assets/vfs.idl")); dbg!(Token::lexer(VFS).for_each(|a| println!("{:#?}", a)));
dbg!(Parser::new(VFS).parse());
}
for token in lex { #[macro_export]
// let ok_token = token.ok(); macro_rules! unwrap_match {
// if ok_token.is_some() { ($x:expr, $m:pat => $a:expr) => {
// println!("{:?}", ok_token.unwrap()); match $x {
// } $m => $a,
println!("{:?}", token); _ => unreachable!()
} }
};
} }

150
programs/aidl/src/parser.rs Normal file
View file

@ -0,0 +1,150 @@
use logos::{Logos, SpannedIter};
use crate::{
ast::{IDLModule, Item, ItemInterface, ItemType, UseDecl},
lexer::{Span, Spanned, Token, Ident},
};
use std::{
iter::{Iterator, Peekable, Filter},
ops::Range,
};
/// The underlying token stream: a peekable, filtered `logos` spanned iterator.
/// The filter closure is boxed so its type can be named in this alias.
type Wtf<'a> = Peekable<
    Filter<SpannedIter<'a, Token>, Box<dyn Fn(&(Result<Token, ()>, Range<usize>)) -> bool>>,
>;

/// Wraps the lexer output, skipping comments and lex errors.
struct TokenIterator<'a> {
    spanned: Wtf<'a>,
}

/// Filter predicate: keeps only successfully-lexed, non-comment tokens.
fn token_is_not_comment((ref a, ..): &(Result<Token, ()>, Range<usize>)) -> bool {
    !matches!(a, Err(_) | Ok(Token::Comment(..)))
}
impl<'a> TokenIterator<'a> {
    /// Lexes `src` and wraps the result in the comment/error-skipping stream.
    pub fn new(src: &'a str) -> Self {
        let spanned = Token::lexer(src)
            .spanned()
            .filter(
                // why: cast to the boxed trait-object type named by `Wtf`
                Box::new(token_is_not_comment) as Box<dyn Fn(&(Result<Token, ()>, Range<usize>)) -> bool>,
            )
            .peekable();
        Self { spanned }
    }

    /// Consumes and returns the next token with its span.
    /// Returns `None` at end of input (errors are filtered out upstream).
    pub fn next(&mut self) -> Option<Spanned<Token>> {
        let nxt = self
            .spanned
            .next()
            .and_then(|(token, span)| Some(Spanned(token.ok()?, Span(span))));
        // NOTE(review): debug tracing left in; consider removing before release.
        println!("[NEXT] {:#?}", nxt);
        nxt
    }

    /// Peeks at the next token without consuming it.
    pub fn peek(&mut self) -> Option<Spanned<&Token>> {
        let peek = self
            .spanned
            .peek()
            .and_then(|(token, span)| Some(Spanned(token.as_ref().ok()?, Span(span.clone()))));
        // NOTE(review): debug tracing left in; consider removing before release.
        println!("[PEEK] {:#?}", peek);
        peek
    }
}
/// Recursive-descent parser over the filtered token stream.
pub struct Parser<'a> {
    tokens: TokenIterator<'a>,
}
impl<'a> Parser<'a> {
    /// Creates a parser over `src`.
    pub fn new(src: &'a str) -> Self {
        Self {
            tokens: TokenIterator::new(src),
        }
    }

    /// Consumes and returns the next token iff `matcher` accepts it.
    /// A non-matching token is left in the stream (peek-then-next).
    fn get_real(&mut self, matcher: impl Fn(&Token) -> bool) -> Option<Spanned<Token>> {
        if matcher(self.tokens.peek()?.0) {
            self.tokens.next()
        } else {
            None
        }
    }

    /// Expects a `;`, returning its span.
    fn semi(&mut self) -> Option<Span> {
        Some(self.get_real(|token| matches!(token, Token::Semicolon))?.1)
    }

    /// Expects a plain (non-keyword) identifier.
    fn ask_ident(&mut self) -> Option<Spanned<String>> {
        Some(crate::unwrap_match!(
            self.get_real(|token| matches!(token, Token::Ident(Ident::Other(_))))?,
            Spanned(Token::Ident(Ident::Other(ident)), span) =>
                Spanned(ident, span)
        ))
    }

    /// Parses `Interface <name>;` (member functions are not parsed yet — MVP).
    fn ask_interface(&mut self) -> Option<Spanned<ItemInterface>> {
        // FIX: locals renamed to snake_case (`kSp`, `iSp`, ... were
        // non-idiomatic and trip the non_snake_case lint).
        let Spanned(_, keyword_span) =
            self.get_real(|token| matches!(token, Token::Ident(Ident::Interface)))?;
        let Spanned(ident, ident_span) = self.ask_ident()?;
        Some(Spanned::new(
            ItemInterface {
                name: ident,
                functions: vec![],
            },
            [keyword_span, ident_span, self.semi()?],
        ))
    }

    /// Parses `Type <name> = <referree>;`.
    fn ask_typealias(&mut self) -> Option<Spanned<ItemType>> {
        let Spanned(_, keyword_span) =
            self.get_real(|token| matches!(token, Token::Ident(Ident::Type)))?;
        let Spanned(name, name_span) = self.ask_ident()?;
        let Spanned(_, equals_span) = self.get_real(|token| matches!(token, Token::Equals))?;
        let Spanned(referree, referree_span) = self.ask_ident()?;
        Some(Spanned::new(
            ItemType { name, referree },
            [keyword_span, name_span, equals_span, referree_span, self.semi()?],
        ))
    }

    /// Dispatches on the next keyword to parse one item; `None` if the next
    /// token does not start an item.
    fn ask_item(&mut self) -> Option<Spanned<Item>> {
        Some(match self.tokens.peek()?.0 {
            Token::Ident(Ident::Other(_)) => None?,
            Token::Ident(keyword) => match keyword {
                Ident::Interface => self.ask_interface()?.map(Item::Interface),
                Ident::Type => self.ask_typealias()?.map(Item::Type),
                _ => None?,
            },
            _ => None?,
        })
    }

    /// Parses `Use <module>;`.
    fn ask_use(&mut self) -> Option<Spanned<UseDecl>> {
        let Spanned(_, keyword_span) =
            self.get_real(|token| matches!(token, Token::Ident(Ident::Use)))?;
        let Spanned(name, name_span) = self.ask_ident()?;
        Some(Spanned::new(
            UseDecl { module: name },
            [keyword_span, name_span, self.semi()?],
        ))
    }

    /// Parses a whole module: all leading `Use` declarations, then items
    /// (matching the field order documented on `IDLModule`).
    pub fn parse(mut self) -> IDLModule {
        IDLModule {
            uses: fill_while(|| self.ask_use()),
            items: fill_while(|| self.ask_item()),
        }
    }
}
/// Repeatedly calls `f`, collecting the inner values (spans discarded)
/// until it yields `None`.
fn fill_while<T>(mut f: impl FnMut() -> Option<Spanned<T>>) -> Vec<T> {
    std::iter::from_fn(move || f().map(|spanned| spanned.0)).collect()
}

View file

@ -1 +0,0 @@
nightly

3
rust-toolchain.toml Normal file
View file

@ -0,0 +1,3 @@
# Pins the project to the nightly toolchain (replacing the legacy bare
# `rust-toolchain` file) and requests the usual dev components.
[toolchain]
channel = "nightly"
components = ["cargo", "clippy", "rustfmt", "rust-analyzer"]