AIDL #1

Open
nothendev wants to merge 11 commits from nothendev/ableos_userland:master into master
26 changed files with 1727 additions and 123 deletions

143
Cargo.lock generated
View File

@ -35,7 +35,14 @@ dependencies = [
name = "aidl"
version = "0.1.0"
dependencies = [
"logos 0.13.0",
"codespan-reporting",
"derive_more",
"itertools",
"logos",
"proc-macro2",
"quote",
"syn 2.0.15",
"thiserror",
]
[[package]]
@ -113,6 +120,22 @@ dependencies = [
"toml 0.5.9 (git+https://git.ablecorp.us/theoddgarlic/toml-rs)",
]
[[package]]
name = "codespan-reporting"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
dependencies = [
"termcolor",
"unicode-width",
]
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "cpuio"
version = "0.3.2"
@ -139,6 +162,25 @@ dependencies = [
name = "derelict_microarchitecture"
version = "0.1.0"
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 1.0.105",
]
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "fnv"
version = "1.0.7"
@ -194,6 +236,15 @@ dependencies = [
"hashbrown 0.12.3",
]
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "libc"
version = "0.2.138"
@ -225,22 +276,13 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "logos"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf8b031682c67a8e3d5446840f9573eb7fe26efe7ec8d195c9ac4c0647c502f1"
dependencies = [
"logos-derive 0.12.1",
]
[[package]]
name = "logos"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1"
dependencies = [
"logos-derive 0.13.0",
"logos-derive",
]
[[package]]
@ -257,20 +299,6 @@ dependencies = [
"syn 2.0.15",
]
[[package]]
name = "logos-derive"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d849148dbaf9661a6151d1ca82b13bb4c4c128146a88d05253b38d4e2f496c"
dependencies = [
"beef",
"fnv",
"proc-macro2",
"quote",
"regex-syntax",
"syn 1.0.105",
]
[[package]]
name = "logos-derive"
version = "0.13.0"
@ -382,6 +410,21 @@ dependencies = [
"serde",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "serde"
version = "1.0.148"
@ -481,6 +524,15 @@ dependencies = [
"versioning",
]
[[package]]
name = "termcolor"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "1.0.37"
@ -586,11 +638,17 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unicode-width"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "uri"
version = "0.1.0"
dependencies = [
"logos 0.13.0",
"logos",
]
[[package]]
@ -631,9 +689,40 @@ dependencies = [
name = "wat2wasm"
version = "0.1.0"
dependencies = [
"logos 0.12.1",
"logos",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "winnow"
version = "0.4.4"

View File

@ -6,4 +6,11 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
logos = "0.13.0"
codespan-reporting = "0.11.1"
derive_more = "0.99.17"
itertools = "0.10.5"
logos = "0"
proc-macro2 = "1.0.56"
quote = "1.0.26"
syn = "2.0.15"
thiserror = "1"

View File

@ -4,7 +4,7 @@ The example implementation will be in rust
IDL | Rust
__________
boolean | bool
Boolean | bool
I8 | i8
I16 | i16
I32 | i32
@ -15,8 +15,8 @@ U32 | u32
U64 | u64
F32 | f32
F64 | f64
Constant X Y Z | const X: Y = Z;
Type | type
Constant X = Make Y { Z } | const X: Y = Y { Z };
Alias | type
Vector<X> | Vec<X>
Array[X;Y] | [X;Y]
Function X accepts(YX) returns(ZX) | fn X(YX) -> ZX
Array<X, Y> | [X; Y]
Function X Takes(YX) Returns(ZX) | fn X(YX) -> ZX

View File

@ -1,20 +1,20 @@
Type Byte = U8;
Type String = Vector<Byte>;
Alias Byte = u8;
Alias Int = u32;
Enumurate Boolean{
False = 0,
True = 1,
Enumeration Nothing {}
Enumeration Option<T> {
None,
Some(T)
}
Union Option<T>{
None,
Some<T>
Enumeration Result<T, E> {
Ok(T),
Err(E)
}
Structure Version {
major: Byte,
minor: Byte,
patch: Byte,
};
major: Byte,
minor: Byte,
patch: Byte,
}

View File

@ -0,0 +1,21 @@
#![crate_name = "aidl_core"]
#![crate_type = "rlib"]
#![no_implicit_prelude]
extern crate core as rust_core;
extern crate alloc as rust_alloc;
pub use self::rust_core::{option::Option, result::Result};
pub use self::rust_alloc::{vec::Vec as Vector, string::String};
pub type Nothing = ();
pub type Byte = u8;
pub type Int = u32;
#[derive(Debug, Clone, Copy)]
pub struct Version {
pub major: Byte,
pub minor: Byte,
pub patch: Byte
}

View File

@ -0,0 +1,19 @@
Type UUID = Array<U8, 16>;
Type Nanoseconds = U32;
Structure Duration {
secs: U64
nanos: Nanoseconds
}
Structure LinkedList<T> {
data: T,
child: Option<LinkedList<T>>,
}
Constant VERSION = Version {
major: 1,
minor: 0,
patch: 0
};

View File

@ -1,13 +0,0 @@
Type UUID = Array[U8; 16];
Type Nanoseconds = U32;
Structure Duration{
secs: U64,
nanos: Nanoseconds,
}
Structure LinkedList{
data: Any,
child: Option<LinkedList>,
}

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,6 @@
Interface Thing {
Function moves Takes(Move Self) Returns(Self);
Function immut_ref Takes(Reference Self);
Function mut_ref Takes(Mutable Reference Self);
}

View File

@ -1,24 +1,26 @@
Module vfs;
// core provides lots of useful types like String and Byte
use core;
Use core.Version;
Use core.Vector;
Use core.String;
Constant VERSION Version{
major: 1,
minor: 0,
patch: 0,
}
Constant VERSION = Make Version {
major: 1,
minor: 0,
patch: 0,
};
Type Path = String;
Alias Path = String;
Structure File {
name: String,
data: Vector<Byte>,
name: String,
data: Vector<Byte>,
}
Interface File{
function new accepts(Path) returns(None);
Interface File {
Function create Takes(Path) Returns(Nothing);
// Open in this iteration assumes the file exists
function open accepts(Path) returns(File);
Function open Takes(Path) Returns(File);
function close accepts(File) returns(None);
}
Function close Takes(File) Returns(Nothing);
}

View File

@ -0,0 +1,26 @@
#![crate_name = "aidl_vfs"]
#![crate_type = "rlib"]
#![no_implicit_prelude]
extern crate aidl_core;
use aidl_core::{Vector, Version, String};
pub const VERSION: Version = Version {
major: 1,
minor: 0,
patch: 0,
};
pub type Path = String;
pub struct FFile {
pub name: String
}
pub trait File {
fn fields(&self) -> &FFile;
fn fields_mut(&mut self) -> &mut FFile;
fn into_fields(self) -> FFile where Self: Sized;
fn new(path: Path) -> Self;
}

View File

@ -0,0 +1,28 @@
Use core.Byte;
Use core.Int;
Constant Hi = "WHY???/\n";
Alias Yo = Byte;
Constant Version = Make Version {
major: 1, minor: 0, patch: 0
};
Interface Iface {
Function hello Takes(Int, Boolean,) Returns(Int);
}
Function a_free_function Returns(Boolean);
Structure Hello {
world: Boolean,
prompt: Option<String>,
}
Enumeration Reality {
Dead(Boolean, Boolean),
Alive {
health: Int,
dying: Boolean,
},
}

181
programs/aidl/src/ast.rs Normal file
View File

@ -0,0 +1,181 @@
//! **note** the order of fields is the order of parsing.
use std::collections::HashMap;
/// An IDL module.
///
/// Parsing order:
/// - use declarations,
/// - items
#[derive(Debug)]
pub struct IDLModule {
// Module name; codegen derives the generated crate name `aidl_<name>` from it.
pub name: String,
// why: only allow use before other items
// parser will error if use is present in any other place
pub uses: Vec<UseDecl>,
// All remaining top-level items, in parse order.
pub items: Vec<Item>,
}
/// Any top-level declaration that may appear in a module after the use block.
#[derive(Debug)]
pub enum Item {
Interface(ItemInterface),
Alias(ItemAlias),
Constant(ItemConstant),
Function(Function),
Structure(ItemStructure),
Enumeration(ItemEnumeration),
}
/// A function signature: `Function name Takes(...) Returns(...)`.
#[derive(Debug, Default)]
pub struct Function {
pub name: String,
// Parameter types, in declaration order.
pub takes: Vec<Type>,
// Return type; the parser substitutes the `Nothing` type when the
// Returns clause is omitted (see `ask_function`).
pub returns: Type,
}
/// A (possibly generic) type reference by name.
#[derive(Debug)]
pub struct Type {
pub name: String,
pub arguments: TypeArguments,
}
impl Type {
/// A placeholder type whose concrete name is yet to be inferred (`_`).
pub fn infer() -> Self {
Self {
name: String::from(INFER_TYPE),
arguments: TypeArguments::None,
}
}
}
// Well-known type names.
pub const NOTHING_TYPE: &str = "Nothing";
pub const INFER_TYPE: &str = "_";
impl Default for Type {
// The default type is `Nothing` (mapped to `()` in the generated core crate).
fn default() -> Self {
Self {
name: String::from(NOTHING_TYPE),
arguments: TypeArguments::None,
}
}
}
/// Generic arguments attached to a type reference.
#[derive(Debug, Default)]
pub enum TypeArguments {
/// TypeName
#[default]
None,
/// TypeName<T1, T2, T3, TN>
// NOTE(review): boxing each element of a Vec is redundant (the Vec already
// heap-allocates); kept because codegen pattern-matches this exact shape.
AngleBracketed(Vec<Box<Type>>),
}
pub fn nothing() -> Type {
Type::default()
}
/// `Interface Name { Function ...; }`
#[derive(Debug)]
pub struct ItemInterface {
pub name: String,
pub functions: Vec<Function>,
}
/// `Structure Name { field: Type, ... }`
#[derive(Debug)]
pub struct ItemStructure {
pub name: String,
// Field name -> field type. NOTE(review): a HashMap loses the source
// declaration order of fields — confirm codegen does not depend on it.
pub fields: HashMap<String, Type>,
pub arguments: TypeArguments,
}
/// `Alias Name = Type;`
#[derive(Debug)]
pub struct ItemAlias {
pub name: String,
// The aliased (referred-to) type. NOTE(review): "referree" looks like a
// typo of "referent"; renaming the field would break its users, so it stays.
pub referree: Type,
}
/// `Constant NAME = <expr>;`
#[derive(Debug)]
pub struct ItemConstant {
pub name: String,
pub expr: Expr,
}
/// `Enumeration Name<Args> { Variant, Variant(T), Variant { f: T }, Variant = N }`
#[derive(Debug)]
pub struct ItemEnumeration {
pub name: String,
pub arguments: TypeArguments,
pub variants: Vec<EnumerationVariant>,
}
/// One variant of an `Enumeration`, with its optional payload.
#[derive(Debug)]
pub struct EnumerationVariant {
pub name: String,
pub content: EnumerationContent,
}
/// Payload carried by an enumeration variant.
#[derive(Debug, Default)]
pub enum EnumerationContent {
// Bare variant: `Variant`.
#[default]
None,
// Tuple payload: `Variant(A, B)`.
Tuple(Vec<Type>),
// Struct payload: `Variant { field: Type }`.
Structure(HashMap<String, Type>),
// Explicit discriminant: `Variant = 1`.
Value(NumberLiteral),
}
/// A `Use a.b;` declaration.
#[derive(Debug)]
pub struct UseDecl {
// (crate/module name, optional imported item). Codegen prefixes the first
// segment with `aidl_` to form an extern-crate name (see `generate`).
pub path: (String, Option<String>)
}
/// An expression; currently only `Constant` items carry one.
#[derive(Debug)]
pub enum Expr {
Literal(Literal),
// NOTE(review): underscore-prefixed — not produced by the parser yet.
_IdentAccess(String),
Make(Box<ExprMake>),
}
/// A `Make Name { field: expr, ... }` structure-construction expression.
#[derive(Debug)]
pub struct ExprMake {
pub name: String,
pub params: HashMap<String, Expr>,
}
/// A literal value: string, number (with optional width suffix) or char.
#[derive(Debug)]
pub enum Literal {
String(String),
Number(NumberLiteral),
Char(char),
}
/// A number literal resolved to the width named by its suffix
/// (`3u8` -> `U8(3)`); Display re-attaches the suffix.
#[derive(Debug, derive_more::Display)]
pub enum NumberLiteral {
#[display(fmt = "{_0}ptr")]
Ptr(usize),
#[display(fmt = "{_0}u8")]
U8(u8),
#[display(fmt = "{_0}i8")]
I8(i8),
#[display(fmt = "{_0}u16")]
U16(u16),
#[display(fmt = "{_0}i16")]
I16(i16),
#[display(fmt = "{_0}u32")]
U32(u32),
#[display(fmt = "{_0}i32")]
I32(i32),
#[display(fmt = "{_0}u64")]
U64(u64),
#[display(fmt = "{_0}i64")]
I64(i64),
// No suffix was written; the concrete width is left to later stages.
#[display(fmt = "{_0}")]
Infer(i64),
}
/// seg1.seg2.seg3.segN
// NOTE(review): not referenced anywhere in this changeset — confirm it is
// intended for future use before relying on it.
#[derive(Debug)]
pub struct ModulePath {
pub segments: Vec<String>,
}

View File

@ -0,0 +1,130 @@
use std::default::default;
use crate::{ast::{IDLModule, ItemInterface, TypeArguments, Type}, unwrap_match};
use itertools::Itertools;
use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, ToTokens};
use syn::{Attribute, ItemExternCrate, ItemTrait, ItemUse, LitStr, Meta, Path, UsePath, Generics, punctuated::Punctuated, TypeArray, LitInt};
// Wraps a meta item into an inner attribute (`#![...]`).
fn attr_inner(meta: Meta) -> Attribute {
    let style = syn::AttrStyle::Inner(default());
    Attribute {
        pound_token: default(),
        bracket_token: default(),
        style,
        meta,
    }
}
// Builds a bare inner attribute, e.g. `#![no_implicit_prelude]`.
fn attr_just(name: &'static str) -> Attribute {
    let path = Path::from(Ident::new(name, Span::call_site()));
    attr_inner(Meta::Path(path))
}
// Builds an inner name-value attribute with a string literal,
// e.g. `#![crate_name = "aidl_vfs"]`.
fn attr_inner_eq(name: &'static str, expr: &str) -> Attribute {
    let lit = syn::Lit::Str(LitStr::new(expr, Span::call_site()));
    let value = syn::Expr::Lit(syn::ExprLit { attrs: vec![], lit });
    attr_inner(Meta::NameValue(syn::MetaNameValue {
        path: Path::from(Ident::new(name, Span::call_site())),
        eq_token: default(),
        value,
    }))
}
// Builds an `extern crate <name>;` item (private visibility, no rename).
fn extern_crate(name: &str) -> ItemExternCrate {
    let ident = Ident::new(name, Span::call_site());
    ItemExternCrate {
        attrs: vec![],
        vis: syn::Visibility::Inherited,
        extern_token: default(),
        crate_token: default(),
        rename: None,
        semi_token: default(),
        ident,
    }
}
// Builds a `use a::b;` item from the two path segments.
fn make_use(a: &str, b: &str) -> ItemUse {
    let leaf = syn::UseTree::Name(syn::UseName {
        ident: Ident::new(b, Span::call_site()),
    });
    let tree = syn::UseTree::Path(UsePath {
        ident: Ident::new(a, Span::call_site()),
        colon2_token: default(),
        tree: Box::new(leaf),
    });
    ItemUse {
        attrs: vec![],
        vis: syn::Visibility::Inherited,
        use_token: default(),
        leading_colon: None,
        semi_token: default(),
        tree,
    }
}
/// Lowers an AST `Type` to a `syn::Type`.
///
/// `Array<T, N>` becomes the Rust array type `[T; N]`; any other name is
/// emitted as a plain path type.
fn _gen_type(ty: Type) -> syn::Type {
    // Builds `[T; N]` from the two angle-bracketed arguments of `Array<T, N>`.
    fn make_array(mut args: Vec<Box<Type>>) -> TypeArray {
        // Arguments arrive in declaration order, so the arity (`N`) pops first.
        // NOTE(review): assumes exactly two arguments — a malformed `Array`
        // panics here; surfacing a parse/codegen error would be friendlier.
        let box arity = args.pop().unwrap();
        let box real = args.pop().unwrap();
        drop(args);
        TypeArray {
            bracket_token: default(),
            // Fix: recurse via `_gen_type` — the original called the
            // nonexistent name `gen_type`, which does not resolve.
            elem: Box::new(_gen_type(real)),
            semi_token: default(),
            len: syn::Expr::Lit(syn::ExprLit {
                attrs: vec![],
                lit: syn::Lit::Int(LitInt::new(&arity.name, Span::call_site())),
            }),
        }
    }
    match ty.name.as_str() {
        "Array" => syn::Type::Array(make_array(unwrap_match!(
            ty.arguments,
            TypeArguments::AngleBracketed(angle) => angle
        ))),
        name => syn::Type::Path(syn::TypePath {
            qself: None,
            path: Path::from(Ident::new(name, Span::call_site())),
        }),
    }
}
// fn gen_iface(interface: ItemInterface) -> ItemTrait {
// ItemTrait {
// attrs: default(),
// vis: syn::Visibility::Public(default()),
// unsafety: None,
// auto_token: None,
// restriction: None,
// trait_token: default(),
// ident: Ident::new(&interface.name, Span::call_site()),
// generics: ,
// colon_token: (),
// supertraits: (),
// brace_token: (),
// items: (),
// }
// }
pub fn generate(module: IDLModule) -> TokenStream {
let name = String::from("aidl_") + &module.name;
let attrs: TokenStream = [
attr_inner_eq("crate_name", &name),
attr_inner_eq("crate_type", "rlib"),
attr_just("no_implicit_prelude"),
]
.into_iter()
.map(ToTokens::into_token_stream)
.collect();
let uses: Vec<_> = module
.uses
.into_iter()
.map(|a| a.path)
.map(|(a, b)| (String::from("aidl_") + &a, b)) // aidl_core.Something
.collect();
let extern_crates: TokenStream = uses
.iter()
.map(|(a, _)| a.as_str())
.unique()
.map(extern_crate)
.map(ToTokens::into_token_stream)
.collect();
let use_defs: TokenStream = uses
.iter()
.filter_map(|(ref a, ref b)| b.as_ref().map(|b| make_use(a.as_str(), b.as_str())))
.map(ToTokens::into_token_stream)
.collect();
quote! {
#attrs
#extern_crates
#use_defs
}
}

232
programs/aidl/src/lexer.rs Normal file
View File

@ -0,0 +1,232 @@
use std::{
fmt::Display,
ops::{Add, AddAssign, Range},
};
use logos::Logos;
/// A lexical token of the IDL language.
///
/// Whitespace is skipped; comments are still produced as tokens so a later
/// pass can filter them out.
#[derive(Logos, Debug, PartialEq, derive_more::Display, Clone)]
#[logos(skip r"[ \t\n\f]+")]
pub enum Token {
#[token("{")]
#[display(fmt = "{{")]
LeftCurly,
#[token("}")]
#[display(fmt = "}}")]
RightCurly,
#[token("(")]
#[display(fmt = "(")]
LeftParen,
#[token(")")]
#[display(fmt = ")")]
RightParen,
#[token(";")]
#[display(fmt = ";")]
Semicolon,
#[token(":")]
#[display(fmt = ":")]
Colon,
// NOTE(review): these are the `<`/`>` angle brackets used for generics,
// despite the "Arrow" names.
#[token("<")]
#[display(fmt = "<")]
LeftArrow,
#[token(">")]
#[display(fmt = ">")]
RightArrow,
#[token(",")]
#[display(fmt = ",")]
Comma,
#[token("=")]
#[display(fmt = "=")]
Equals,
#[token(".")]
#[display(fmt = ".")]
Dot,
// String literal with escape support; the callback strips the quotes.
#[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| lex.slice().strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
#[display(fmt = "\"{_0}\"")]
StringLiteral(String),
// Single-character literal; the callback strips the single quotes.
#[regex(r"'.'", |lex| lex.slice().strip_prefix('\'')?.strip_suffix('\'')?.parse().ok())]
#[display(fmt = "{_0}")]
CharLiteral(char),
// Optionally negative integer literal.
#[regex(r#"(-)?\d+"#, |lex| lex.slice().parse().ok())]
#[display(fmt = "{_0}")]
NumberLiteral(i64),
// Width suffix following a number (e.g. `3u8`); classified by the
// `NumberSuffix` sub-lexer.
#[regex(
"(ptr|u8|i8|u16|i16|u32|i32|u64|i64|f32|f64)",
|lex| NumberSuffix::lexer(lex.slice()).next().and_then(Result::ok)
)]
#[display(fmt = "{_0}")]
NumberSuffix(NumberSuffix),
// Identifier or keyword; the `Ident` sub-lexer separates keywords.
#[regex(r#"[a-zA-Z_][a-zA-Z\d_]*"#, |lex| Ident::lexer(lex.slice()).next().and_then(Result::ok))]
#[display(fmt = "{_0}")]
Ident(Ident),
// Line comment. NOTE(review): the stored String keeps the leading `//`
// (the whole slice is parsed), so this Display format prints `//` twice.
#[regex(r"//.*", |lex| lex.slice().parse().ok())]
#[display(fmt = "//{_0}")]
Comment(String),
}
/// Sub-lexer that classifies an identifier slice: keywords get dedicated
/// variants, everything else falls through to `Other`.
#[derive(Logos, Debug, Clone, PartialEq, Eq, derive_more::Display)]
pub enum Ident {
#[token("Module")]
#[display(fmt = "Module")]
Module,
#[token("Interface")]
#[display(fmt = "Interface")]
Interface,
#[token("Function")]
#[display(fmt = "Function")]
Function,
#[token("Constant")]
#[display(fmt = "Constant")]
Constant,
#[token("Structure")]
#[display(fmt = "Structure")]
Structure,
#[token("Alias")]
#[display(fmt = "Alias")]
Alias,
#[token("Enumeration")]
#[display(fmt = "Enumeration")]
Enumeration,
#[token("Use")]
#[display(fmt = "Use")]
Use,
#[token("Make")]
#[display(fmt = "Make")]
Make,
#[token("Takes")]
#[display(fmt = "Takes")]
Takes,
#[token("Returns")]
#[display(fmt = "Returns")]
Returns,
#[token("_")]
#[display(fmt = "_")]
Underscore,
// Any non-keyword identifier.
#[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().parse().ok())]
#[display(fmt = "{_0}")]
Other(String),
}
/// Numeric width suffixes accepted after a number literal
/// (`ptr` is pointer-sized — it maps to `usize` in the parser).
#[derive(Logos, Debug, Clone, Copy, PartialEq, Eq, derive_more::Display)]
pub enum NumberSuffix {
#[token("ptr")]
#[display(fmt = "ptr")]
Ptr,
#[token("u8")]
#[display(fmt = "u8")]
U8,
#[token("i8")]
#[display(fmt = "i8")]
I8,
#[token("u16")]
#[display(fmt = "u16")]
U16,
#[token("i16")]
#[display(fmt = "i16")]
I16,
#[token("u32")]
#[display(fmt = "u32")]
U32,
#[token("i32")]
#[display(fmt = "i32")]
I32,
#[token("u64")]
#[display(fmt = "u64")]
U64,
#[token("i64")]
#[display(fmt = "i64")]
I64,
#[token("f32")]
#[display(fmt = "f32")]
F32,
#[token("f64")]
#[display(fmt = "f64")]
F64,
}
impl NumberSuffix {
/// Every recognised suffix spelling; the prechecker compares lowercased
/// identifiers against this list to warn about uppercase variants like `U8`.
pub const ALL_SUFFIXES: [&str; 11] = [
"ptr",
"u8",
"i8",
"u16",
"i16",
"u32",
"i32",
"u64",
"i64",
"f32",
"f64"
];
}
/// A half-open byte range into the source text.
#[derive(Debug, Clone)]
pub struct Span(pub Range<usize>);
impl Span {
    /// The empty span, used as a neutral placeholder.
    pub const ZERO: Self = Self(0..0);
    /// Inclusive start offset.
    pub fn lower(&self) -> usize {
        self.0.start
    }
    /// Exclusive end offset.
    pub fn upper(&self) -> usize {
        self.0.end
    }
    /// Smallest span covering both `self` and `other`.
    pub fn concat(self, other: Span) -> Self {
        use std::cmp::{max, min};
        Self(min(self.lower(), other.lower())..max(self.upper(), other.upper()))
    }
}
impl Display for Span {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}..{}", self.lower(), self.upper())
    }
}
impl Add for Span {
    type Output = Self;
    // `+` is an alias for `concat`.
    fn add(self, rhs: Self) -> Self::Output {
        self.concat(rhs)
    }
}
impl AddAssign for Span {
    fn add_assign(&mut self, rhs: Self) {
        *self = self.clone() + rhs;
    }
}
/// A value paired with the source span it was parsed from.
#[derive(Debug, Clone)]
pub struct Spanned<T>(pub T, pub Span);
impl<T> Spanned<T> {
    /// Wraps `thing` in the union of `spans`.
    ///
    /// Bug fix: this previously folded starting from `Span::ZERO`, so
    /// `min(0, lower)` anchored every produced span at offset 0. Reducing
    /// over the given spans keeps the true lower bound; an empty array
    /// still yields `Span::ZERO`.
    pub fn new<const N: usize>(thing: T, spans: [Span; N]) -> Self {
        Self(
            thing,
            spans.into_iter().reduce(Span::concat).unwrap_or(Span::ZERO),
        )
    }
    /// Transforms the value while preserving the span.
    pub fn map<R>(self, f: impl Fn(T) -> R) -> Spanned<R> {
        Spanned(f(self.0), self.1)
    }
}
impl<T: Display> Display for Spanned<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{} @ {}", self.0, self.1)
    }
}

View File

@ -1,62 +1,168 @@
use logos::Logos;
#![feature(result_option_inspect)]
#![feature(box_patterns)]
#![feature(default_free_fn)]
#![allow(non_snake_case)]
#[derive(Logos, Debug, PartialEq)]
#[logos(skip r"[ \t\n\f]+")] // Ignore this regex pattern between tokens
enum Token {
#[token("{")]
LeftBrace,
use std::{fmt::Display, path::Path, process::exit};
#[token("}")]
RightBrace,
use ast::IDLModule;
use codegen::generate;
use codespan_reporting::{
diagnostic::{Diagnostic, Label, Severity},
files::SimpleFile,
term::{
emit,
termcolor::{StandardStream, StandardStreamLock},
Config,
},
};
use lexer::{NumberSuffix, Token};
use parser::TokenIterator;
#[token("(")]
LeftParen,
use crate::lexer::Spanned;
#[token(")")]
RightParen,
mod ast;
mod codegen;
mod lexer;
mod parser;
#[token(";")]
Semicolon,
fn precheck<N: Display + Clone, S: AsRef<str>>(
writer: &mut StandardStreamLock<'_>,
config: &Config,
file: &SimpleFile<N, S>,
) {
let mut lexer = TokenIterator::new(file.source().as_ref());
let mut diagnostics = vec![];
let mut previous = lexer.peek().ok().map(|Spanned(a, b)| Spanned(a.clone(), b));
#[token(":")]
Colon,
while let Ok(Spanned(token, span)) = lexer.next() {
let prev = Spanned(token.clone(), span.clone());
match token {
Token::Ident(lexer::Ident::Other(t)) if t == "Type" => {
diagnostics.push(
Diagnostic::error()
.with_labels(vec![Label::primary((), span.0)])
.with_message("`Type` is not supported anymore.")
.with_notes(vec!["use `Alias` instead of `Type`".into()]),
);
}
Token::Ident(lexer::Ident::Other(ident))
if lexer
.peek()
.is_ok_and(|Spanned(a, _)| matches!(a, Token::LeftCurly))
&& previous.is_some_and(|Spanned(ref a, _)| matches!(a, Token::Equals)) =>
{
diagnostics.push(
Diagnostic::error()
.with_message("Unknown expression")
.with_labels(vec![Label::primary((), span.0.clone())])
.with_notes(vec![
format!("add `Make` before the structure name to create a Make expression that will construct the `{ident}` structure"
),
]),
);
}
Token::Ident(lexer::Ident::Other(ident))
if NumberSuffix::ALL_SUFFIXES.contains(&ident.to_lowercase().as_str()) =>
{
diagnostics.push(
Diagnostic::warning()
.with_message("Potentially invalid use of an uppercased number type")
.with_labels(vec![Label::primary((), span.0)])
.with_notes(vec![
format!("Replace {ident} with {}", ident.to_lowercase()),
"Code generation might fail".into(),
]),
);
}
_ => {}
}
previous = Some(prev);
}
#[token("<")]
LeftArrow,
if !diagnostics.is_empty() {
let mut was_fatal = false;
for diagnostic in diagnostics {
if let Severity::Error | Severity::Bug = &diagnostic.severity {
was_fatal = true;
}
#[token(">")]
RightArrow,
emit(writer, config, file, &diagnostic).unwrap();
}
#[token(",")]
Comma,
#[token("=")]
Equals,
#[regex(r#"[A-z]+"#, |lex| lex.slice().parse().ok())]
Literal(String),
#[regex("use [a-zA-Z/]+;", |lex| lex.slice().parse().ok())]
Component(String),
#[regex("U[0-9]+", |lex| lex.slice().parse().ok())]
UnsignedType(String),
#[regex("I[0-9]+", |lex| lex.slice().parse().ok())]
SignedType(String),
#[regex(r"//[ a-zA-Z!-+]+", |lex| lex.slice().parse().ok())]
Comment(String),
if was_fatal {
exit(1);
}
}
}
fn main() {
let mut lex = Token::lexer(include_str!("../../../programs/aidl/assets/vfs.idl"));
let mut args = std::env::args();
args.next().unwrap();
for token in lex {
// let ok_token = token.ok();
// if ok_token.is_some() {
// println!("{:?}", ok_token.unwrap());
// }
println!("{:?}", token);
let mut ast: Option<IDLModule> = None;
if let Some(file) = args.next() {
let path = Path::new(&file);
let codespan_file = codespan_reporting::files::SimpleFile::new(
&file,
std::fs::read_to_string(path).unwrap(),
);
let writer = StandardStream::stdout(codespan_reporting::term::termcolor::ColorChoice::Auto);
let config = Config {
tab_width: 2,
..Default::default()
};
precheck(&mut writer.lock(), &config, &codespan_file);
match parser::parse(codespan_file.source()) {
Ok(ast_) => {
println!("{:#?}", ast_);
ast = Some(ast_);
}
Err(e) => {
let msg = e.to_string();
let label = match e {
parser::ParserError::UnexpectedEOF => Label::primary(
(),
(codespan_file.source().len() - 1)..codespan_file.source().len(),
)
.with_message("Unexpected end of file here"),
parser::ParserError::Unexpected(expected, Spanned(got, span)) => {
Label::primary((), span.0)
.with_message(format!("Unexpected `{got}`, expected {expected}"))
}
parser::ParserError::PleaseStopParsingUse => unsafe {
std::hint::unreachable_unchecked()
},
};
let diagnostic = codespan_reporting::diagnostic::Diagnostic::error()
.with_message(msg)
.with_labels(vec![label]);
codespan_reporting::term::emit(
&mut writer.lock(),
&config,
&codespan_file,
&diagnostic,
)
.unwrap();
}
}
} else {
eprintln!("No file given. Aborting.");
}
let rust = generate(ast.unwrap());
println!("{}", rust);
}
#[macro_export]
macro_rules! unwrap_match {
($x:expr, $m:pat => $a:expr) => {
match $x {
$m => $a,
_ => unreachable!(),
}
};
}

View File

@ -0,0 +1,145 @@
use std::collections::HashMap;
use crate::{
ast::{EnumerationContent, EnumerationVariant, ItemEnumeration, Type},
lexer::{Ident, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
/// Parses `Enumeration Name<Args> { Variant, Variant(T...), Variant { f: T }, Variant = N }`.
///
/// The inner content loop accepts any combination of `(...)`, `{...}` and
/// `= N` clauses on a single variant; the last clause parsed wins.
pub fn ask_enumeration(&mut self) -> Result<Spanned<ItemEnumeration>, ParserError> {
let Spanned(_, span) = self.get_real(
|token| matches!(token, Token::Ident(Ident::Enumeration)),
"the `Enumeration` keyword",
)?;
// The enumeration name (possibly generic) is parsed as a type.
let Spanned(Type { name, arguments }, _) = self.ask_type()?;
self.get_real(
|token| matches!(token, Token::LeftCurly),
"an opening curly brace",
)?;
let mut variants = vec![];
loop {
match self.tokens.peek()?.0 {
Token::Ident(Ident::Other(_)) => {
let Spanned(variant_name, _) = self.ask_ident()?;
let mut content = EnumerationContent::None;
loop {
match self.tokens.peek()?.0 {
// Tuple payload: `Variant(A, B)`.
Token::LeftParen => {
self.eat();
let mut tuple = vec![];
loop {
match self.tokens.peek()?.0 {
Token::Ident(Ident::Other(_)) => {
tuple.push(self.ask_type()?.0);
match self.tokens.peek()?.0 {
Token::Comma => {
self.eat();
}
Token::RightParen => {}
_ => return Err(self.expected("a comma or closing parentheses")),
};
}
Token::RightParen => {
self.eat();
break;
}
_ => {
return Err(
self.expected("closing parentheses or a type")
)
}
}
}
content = EnumerationContent::Tuple(tuple);
}
// Struct payload: `Variant { field: Type }`.
Token::LeftCurly => {
self.eat();
let mut structure = HashMap::<String, Type>::new();
loop {
match self.tokens.peek()?.0 {
Token::Ident(Ident::Other(_)) => {
let Spanned(field_name, _) = self.ask_ident()?;
self.get_real(
|token| matches!(token, Token::Colon),
"a colon",
)?;
structure.insert(field_name, self.ask_type()?.0);
match self.tokens.peek()?.0 {
Token::Comma => {
self.eat();
}
Token::RightCurly => {}
_ => return Err(self.expected("a comma or closing curly braces")),
};
}
Token::RightCurly => {
self.eat();
break;
}
_ => {
// NOTE(review): this message mentions parentheses but
// this is the brace-delimited (struct payload) branch.
return Err(
self.expected("closing parentheses or a type")
)
}
}
}
content = EnumerationContent::Structure(structure);
}
// Explicit discriminant: `Variant = 1`.
Token::Equals => {
self.eat();
content = EnumerationContent::Value(self._ask_number_literal()?.0);
}
_ => break,
}
}
// Separator after the variant: comma, or the enumeration's closing brace.
match self.tokens.peek()?.0 {
Token::Comma => {
self.eat();
}
Token::RightCurly => {}
_ => return Err(self.expected("a comma or closing curly braces")),
}
variants.push(EnumerationVariant {
name: variant_name,
content,
});
}
Token::RightCurly => break,
_ => return Err(self.expected("a closing curly brace or a variant")),
}
}
if let Spanned(Token::RightCurly, _) = self.tokens.peek()? {
self.eat();
return Ok(Spanned(
ItemEnumeration {
name,
arguments,
variants,
},
span + self.tokens.span(),
));
};
// Only reachable if the peek above does not re-yield the brace the loop
// broke on; the message is a placeholder. NOTE(review): consider a real one.
Err(self.expected("???"))
}
}

View File

@ -0,0 +1,103 @@
use std::collections::HashMap;
use crate::{
ast::{Expr, ExprMake, Literal, NumberLiteral},
lexer::{Ident, NumberSuffix, Spanned, Token},
unwrap_match,
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
/// Parses an expression: a literal, or a `Make` structure-initialiser.
pub fn ask_expr(&mut self) -> Result<Spanned<Expr>, ParserError> {
let Spanned(token, _) = self.tokens.peek()?;
Ok(match token {
Token::StringLiteral(_) | Token::NumberLiteral(_) | Token::CharLiteral(_) => {
self._ask_literal()?.map(Expr::Literal)
}
Token::Ident(Ident::Make) => {
self.eat();
self._ask_struct_init()?.map(Box::new).map(Expr::Make)
}
_ => return Err(self.expected("an expression")),
})
}
/// Parses a number literal with an optional width suffix
/// (`42` -> `Infer`, `42u8` -> `U8`); float suffixes are rejected.
pub fn _ask_number_literal(&mut self) -> Result<Spanned<NumberLiteral>, ParserError> {
match self.tokens.next()? {
Spanned(Token::NumberLiteral(number), mut span) => {
let lit = if let Spanned(Token::NumberSuffix(_), sp) = self.tokens.peek()? {
// Extend the literal's span over the suffix.
span += sp;
use NumberLiteral::*;
match unwrap_match!(
self.tokens.next()?, Spanned(Token::NumberSuffix(suffering), _) => suffering) // eat suffix
{
NumberSuffix::Ptr => Ptr(number as usize),
NumberSuffix::U8 => U8(number as u8),
NumberSuffix::I8 => I8(number as i8),
NumberSuffix::U16 => U16(number as u16),
NumberSuffix::I16 => I16(number as i16),
NumberSuffix::U32 => U32(number as u32),
NumberSuffix::I32 => I32(number as i32),
NumberSuffix::U64 => U64(number as u64),
NumberSuffix::I64 => I64(number),
_ => return Err(self.expected("a non-floating number suffix"))
}
} else {
NumberLiteral::Infer(number)
};
Ok(Spanned(lit, span))
}
_ => Err(self.expected("a number literal")),
}
}
/// Parses any literal (string, char or number).
pub fn _ask_literal(&mut self) -> Result<Spanned<Literal>, ParserError> {
// Numbers are delegated so suffix handling stays in one place.
if let Spanned(Token::NumberLiteral(_), _) = self.tokens.peek()? {
return Ok(self._ask_number_literal()?.map(Literal::Number));
};
let Spanned(token, span) = self.tokens.next()?;
Ok(match token {
Token::StringLiteral(string) => Spanned(Literal::String(string), span),
Token::CharLiteral(chr) => Spanned(Literal::Char(chr), span),
_ => return Err(self.expected("a literal")),
})
}
/// Parses the body of a `Make` expression: `Name { field: expr, ... }`.
/// The `Make` keyword itself is consumed by the caller.
fn _ask_struct_init(&mut self) -> Result<Spanned<ExprMake>, ParserError> {
let Spanned(name, nSp) = self.ask_ident()?;
let Spanned(_, _) = self.get_real(
|token| matches!(token, Token::LeftCurly),
"an opening curly brace (`{`)",
)?;
let mut params = HashMap::<String, Expr>::new();
loop {
match self.tokens.peek()?.0 {
Token::Ident(_) => {
// `unwrap` cannot fail: the peek above guarantees an identifier.
let Spanned(ident, _) = self.ask_ident().unwrap();
self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
let Spanned(value, _) = self.ask_expr()?;
params.insert(ident, value);
match self.tokens.peek()?.0 {
Token::Comma => self.eat(),
Token::RightCurly => {},
_ => return Err(self.expected("a comma or a closing curly brace"))
}
}
Token::RightCurly => break,
_ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
}
}
if let Spanned(Token::RightCurly, ccSp) = self.tokens.next()? {
// Span runs from the structure name to the closing brace.
return Ok(Spanned(ExprMake { name, params }, nSp + ccSp));
};
Err(self.expected("closing curly braces"))
}
}

View File

@ -0,0 +1,144 @@
use crate::{
ast::{nothing, Function, ItemInterface},
lexer::{Ident, Span, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
/// Parses `Interface Name { Function ...; }` into an `ItemInterface`.
pub fn ask_interface(&mut self) -> Result<Spanned<ItemInterface>, ParserError> {
// Interface
let Spanned(_, mut span) = self.get_real(
|token| matches!(token, Token::Ident(Ident::Interface)),
"the `Interface` keyword",
)?;
// InterfaceName
let Spanned(name, _) = self.ask_ident()?;
// {
self.get_real(
|token| matches!(token, Token::LeftCurly),
"opening curly brackets",
)?;
let mut functions = vec![];
// Member functions until the closing `}`.
loop {
match self.tokens.peek()? {
Spanned(Token::RightCurly, end) => {
self.eat();
span += end;
break;
}
Spanned(Token::Ident(Ident::Function), _) => functions.push(self.ask_function()?.0),
_ => return Err(self.expected("A function or closing curly braces")),
}
}
Ok(Spanned(ItemInterface { name, functions }, span))
}
/// Parses a function signature. All of these forms are accepted:
/// `Function f;`, `Function f Returns(T);`,
/// `Function f Takes(A, B);`, `Function f Takes(A) Returns(T);`.
/// An omitted Returns clause yields the `Nothing` type.
pub fn ask_function(&mut self) -> Result<Spanned<Function>, ParserError> {
let Spanned(_, bsp) = self.get_real(
|token| matches!(token, Token::Ident(Ident::Function)),
"the `Function` keyword",
)?;
let Spanned(name, _) = self.ask_ident()?;
// The token after the name decides which form this is.
let Spanned(next, esp) = self.tokens.next()?;
match next {
Token::Ident(Ident::Takes) => {
self.get_real(
|token| matches!(token, Token::LeftParen),
"Opening parentheses",
)?;
let mut takes = vec![];
let mut returns = nothing();
// Comma-separated parameter types until `)`.
loop {
let Spanned(peeked, _) = self.tokens.peek()?;
match peeked {
Token::Ident(_) => {
takes.push(self.ask_type()?.0);
match self.tokens.peek()?.0 {
Token::Comma => {
self.eat();
}
Token::RightParen => {}
_ => return Err(self.expected("a comma or closing parentheses")),
};
}
Token::RightParen => {
self.eat();
break;
}
_ => return Err(self.expected("closing parentheses or a type name")),
}
}
// Optional Returns clause, then the terminating semicolon.
match self.tokens.next()?.0 {
Token::Semicolon => {}
Token::Ident(Ident::Returns) => {
self.get_real(
|token| matches!(token, Token::LeftParen),
"opening parentheses",
)?;
let Spanned(returns_, _) = self.ask_type()?;
returns = returns_;
self.get_real(
|token| matches!(token, Token::RightParen),
"closing parentheses",
)?;
self.semi()?;
}
_ => return Err(self.expected("a semicolon or a Returns clause")),
}
Ok(Spanned(
Function {
name,
takes,
returns,
},
// From the `Function` keyword to the lexer's current position.
bsp + Span(self.tokens.lexer.span()),
))
}
// Returns-only form: `Function f Returns(T);`.
Token::Ident(Ident::Returns) => {
self.get_real(
|token| matches!(token, Token::LeftParen),
"Opening parentheses",
)?;
let Spanned(returns, _) = self.ask_type()?;
self.get_real(
|token| matches!(token, Token::RightParen),
"Closing parentheses",
)?;
Ok(Spanned(
Function {
name,
takes: Vec::new(),
returns,
},
bsp + self.semi()?,
))
}
// Bare form: `Function f;` — no parameters, returns `Nothing`.
Token::Semicolon => Ok(Spanned(
Function {
name,
takes: Vec::new(),
returns: nothing(),
},
bsp + esp,
)),
_ => Err(self.expected("a Takes clause, a Returns clause or a semicolon")),
}
}
}

View File

@ -0,0 +1,244 @@
mod enumeration;
mod expr;
mod interface;
mod structure;
mod types;
use logos::{Lexer, Logos};
use crate::{
ast::{IDLModule, Item, ItemAlias, ItemConstant, UseDecl},
lexer::{Ident, Span, Spanned, Token},
};
use std::iter::Iterator;
/// A lexer wrapper that skips comments and supports single-token lookahead.
pub struct TokenIterator<'a> {
    // Underlying logos lexer over the source text.
    lexer: Lexer<'a, Token>,
    // One-slot lookahead buffer: `None` = nothing peeked yet;
    // `Some(None)` = peeked and the input was exhausted.
    peeked: Option<Option<Token>>,
}
/// Lexer-filter predicate: keep only successfully lexed, non-comment tokens.
fn token_is_not_comment(a: &Result<Token, ()>) -> bool {
    match a {
        Err(_) | Ok(Token::Comment(..)) => false,
        Ok(_) => true,
    }
}
impl<'a> TokenIterator<'a> {
    /// Creates a token iterator over `src` with an empty lookahead slot.
    pub fn new(src: &'a str) -> Self {
        Self {
            lexer: Token::lexer(src),
            peeked: None,
        }
    }

    /// Returns the next non-comment token and its span, or
    /// `ParserError::UnexpectedEOF` when the input is exhausted.
    pub fn next(&mut self) -> Result<Spanned<Token>, ParserError> {
        let n = match self.peeked.take() {
            Some(thing) => thing,
            None => self.lexer.find(token_is_not_comment).and_then(Result::ok),
        };
        // `lexer.span()` refers to the most recently lexed token, which is
        // exactly the token being handed out here (a previously peeked token
        // did not advance the lexer past itself).
        n.map(|token| Spanned(token, Span(self.lexer.span())))
            .ok_or(ParserError::UnexpectedEOF)
    }

    /// Fills the lookahead slot if empty and returns a reference into it.
    fn _peek(&mut self) -> Option<&Token> {
        self.peeked
            .get_or_insert_with(|| self.lexer.find(token_is_not_comment).and_then(Result::ok))
            .as_ref()
    }

    /// Peeks at the next non-comment token without consuming it.
    pub fn peek(&mut self) -> Result<Spanned<&Token>, ParserError> {
        // BUGFIX: fill the lookahead slot *before* reading `lexer.span()`.
        // `_peek` may advance the lexer to the peeked token; the original
        // code captured the span first, so a freshly peeked token was
        // returned carrying the span of the *previous* token.
        if self.peeked.is_none() {
            let _ = self._peek();
        }
        let span = Span(self.lexer.span());
        self.peeked
            .as_ref()
            .and_then(Option::as_ref)
            .map(|token| Spanned(token, span))
            .ok_or(ParserError::UnexpectedEOF)
    }

    /// The source slice of the most recently lexed token, with its span.
    /// Used for error reporting.
    pub fn current(&self) -> Spanned<String> {
        Spanned(self.lexer.slice().to_owned(), self.span())
    }

    /// The span of the most recently lexed token.
    pub fn span(&self) -> Span {
        Span(self.lexer.span())
    }
}
/// Recursive-descent parser over an AIDL token stream.
pub(crate) struct Parser<'a> {
    tokens: TokenIterator<'a>,
}
impl<'a> Parser<'a> {
    /// Creates a parser over `src`.
    pub fn new(src: &'a str) -> Self {
        Self {
            tokens: TokenIterator::new(src),
        }
    }

    /// Consumes and discards the next token.
    fn eat(&mut self) {
        let _ = self.tokens.next();
    }

    /// Consumes and returns the next token if `matcher` accepts it;
    /// otherwise reports an `Unexpected` error describing `expected`.
    fn get_real(
        &mut self,
        matcher: impl Fn(&Token) -> bool,
        expected: &'static str,
    ) -> Result<Spanned<Token>, ParserError> {
        if matcher(self.tokens.peek()?.0) {
            self.tokens.next()
        } else {
            Err(self.expected(expected))
        }
    }

    /// Builds an `Unexpected` error with the lexer's current slice as "got".
    fn expected(&self, expected: &'static str) -> ParserError {
        ParserError::Unexpected(expected.to_owned(), self.tokens.current())
    }

    /// Expects a `;` and returns its span.
    fn semi(&mut self) -> Result<Span, ParserError> {
        Ok(self
            .get_real(|token| matches!(token, Token::Semicolon), "a semicolon")?
            .1)
    }

    /// Parses an identifier: a plain name, `_`, or a number-suffix token.
    fn ask_ident(&mut self) -> Result<Spanned<String>, ParserError> {
        Ok(
            match self.get_real(
                |token| {
                    matches!(
                        token,
                        Token::Ident(Ident::Other(_) | Ident::Underscore) | Token::NumberSuffix(_)
                    )
                },
                "an identifier",
            )? {
                Spanned(Token::Ident(Ident::Other(ident)), span) => Spanned(ident, span),
                Spanned(Token::Ident(Ident::Underscore), span) => Spanned("_".to_owned(), span),
                Spanned(Token::NumberSuffix(suffix), span) => Spanned(suffix.to_string(), span),
                // `get_real`'s matcher guarantees one of the arms above matched.
                _ => unreachable!(),
            },
        )
    }

    /// Parses `Alias Name = Type;`.
    // NOTE: locals renamed from `kSp`/`nSp`/`eqSp`/`rSp` to snake_case to
    // satisfy rustc's `non_snake_case` lint; behavior is unchanged.
    fn ask_alias(&mut self) -> Result<Spanned<ItemAlias>, ParserError> {
        let Spanned(_, k_sp) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Alias)),
            "`Alias`",
        )?;
        let Spanned(name, n_sp) = self.ask_ident()?;
        let Spanned(_, eq_sp) = self.get_real(|token| matches!(token, Token::Equals), "`=`")?;
        let Spanned(referree, r_sp) = self.ask_type()?;
        Ok(Spanned::new(
            ItemAlias { name, referree },
            [k_sp, n_sp, eq_sp, r_sp, self.semi()?],
        ))
    }

    /// Parses `Constant Name = Expr;`.
    fn ask_constant(&mut self) -> Result<Spanned<ItemConstant>, ParserError> {
        let Spanned(_, k_sp) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Constant)),
            "`Constant`",
        )?;
        let Spanned(name, n_sp) = self.ask_ident()?;
        let Spanned(_, eq_sp) = self.get_real(|token| matches!(token, Token::Equals), "`=`")?;
        let Spanned(expr, expr_sp) = self.ask_expr()?;
        Ok(Spanned::new(
            ItemConstant { name, expr },
            [k_sp, n_sp, eq_sp, expr_sp, self.semi()?],
        ))
    }

    /// Dispatches on the leading keyword to parse one top-level item.
    fn ask_item(&mut self) -> Result<Spanned<Item>, ParserError> {
        Ok(match self.tokens.peek()?.0 {
            Token::Ident(Ident::Other(_)) => {
                Err(self.expected("a keyword, not just an identifier"))?
            }
            Token::Ident(keyword) => match keyword {
                Ident::Interface => self.ask_interface()?.map(Item::Interface),
                Ident::Structure => self.ask_structure()?.map(Item::Structure),
                Ident::Alias => self.ask_alias()?.map(Item::Alias),
                Ident::Constant => self.ask_constant()?.map(Item::Constant),
                Ident::Function => self.ask_function()?.map(Item::Function),
                Ident::Enumeration => self.ask_enumeration()?.map(Item::Enumeration),
                _ => Err(self.expected("an item denoting keyword (Interface, Structure, Alias, Constant, Function, Enumeration)"))?,
            },
            _ => Err(self.expected("a keyword"))?,
        })
    }

    /// Parses `Use module;` or `Use module.thing;`.
    ///
    /// Returns `PleaseStopParsingUse` when the next token is not `Use`,
    /// signalling the end of the use section to `parse`.
    fn ask_use(&mut self) -> Result<Spanned<UseDecl>, ParserError> {
        let Spanned(_, span) = {
            match self.tokens.peek()? {
                Spanned(Token::Ident(Ident::Use), _) => Ok(self.tokens.next()?),
                _ => Err(ParserError::PleaseStopParsingUse),
            }
        }?;
        let mut path = (self.ask_ident()?.0, None);
        // Optional `.member` selector after the module name.
        if let Token::Dot = self.tokens.peek()?.0 {
            self.eat();
            path.1 = Some(self.ask_ident()?.0);
        }
        Ok(Spanned::new(UseDecl { path }, [span, self.semi()?]))
    }

    /// Parses a whole module: `Module name;`, then the `Use` declarations,
    /// then the remaining items until end of input.
    pub fn parse(mut self) -> Result<IDLModule, ParserError> {
        Ok(IDLModule {
            name: {
                self.get_real(
                    |token| matches!(token, Token::Ident(Ident::Module)),
                    "the `Module` keyword",
                )?;
                let name = self.ask_ident()?.0;
                self.semi()?;
                name
            },
            uses: {
                let mut real = vec![];
                loop {
                    match self.ask_use() {
                        Ok(Spanned(a, _)) => real.push(a),
                        Err(ParserError::UnexpectedEOF) => return Err(ParserError::UnexpectedEOF),
                        // Next token is not `Use` — move on to item parsing.
                        Err(ParserError::PleaseStopParsingUse) => break,
                        Err(unexpected @ ParserError::Unexpected(..)) => return Err(unexpected),
                    }
                }
                real
            },
            items: fill_while(|| self.ask_item())?,
        })
    }
}
/// Drives `f` repeatedly, collecting parsed values until the input runs out.
/// A hard `Unexpected` error is propagated; EOF ends collection cleanly.
fn fill_while<T>(
    mut f: impl FnMut() -> Result<Spanned<T>, ParserError>,
) -> Result<Vec<T>, ParserError> {
    let mut collected = Vec::new();
    loop {
        match f() {
            Ok(Spanned(item, _)) => collected.push(item),
            Err(ParserError::UnexpectedEOF) => return Ok(collected),
            Err(err @ ParserError::Unexpected(..)) => return Err(err),
            // Only `ask_use` produces this variant, and `ask_use` is never
            // driven through `fill_while`.
            Err(ParserError::PleaseStopParsingUse) => unreachable!(),
        }
    }
}
/// Parses a complete AIDL module from `source`.
pub fn parse(source: &str) -> Result<IDLModule, ParserError> {
    Parser::new(source).parse()
}
/// Errors produced while parsing an AIDL module.
#[derive(thiserror::Error, Debug)]
pub enum ParserError {
    // expected, got
    /// A token other than the expected one was found; carries a description
    /// of what was expected and the offending source slice with its span.
    #[error("Unexpected `{_1}`, expected {_0}")]
    Unexpected(String, Spanned<String>),
    /// The token stream ended while more input was required.
    #[error("Unexpected end of file")]
    UnexpectedEOF,
    /// Internal sentinel returned by `ask_use` to signal the end of the
    /// `Use` section; never surfaced to callers of `parse`.
    #[error("please stop.")]
    PleaseStopParsingUse,
}

View File

@ -0,0 +1,58 @@
use std::collections::HashMap;
use crate::{
ast::{ItemStructure, Type},
lexer::{Ident, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses `Structure Name<Args...> { field: Type, ... }`.
    ///
    /// Returns the structure item with a span running from the `Structure`
    /// keyword to the closing `}`.
    pub fn ask_structure(&mut self) -> Result<Spanned<ItemStructure>, ParserError> {
        let Spanned(_, span) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Structure)),
            "the `Structure` keyword",
        )?;
        // The name is parsed as a type so generic arguments (`Name<T>`)
        // are supported.
        let Spanned(Type { name, arguments }, _) = self.ask_type()?;
        self.get_real(
            |token| matches!(token, Token::LeftCurly),
            "an opening curly brace (`{`)",
        )?;
        let mut fields = HashMap::<String, Type>::new();
        loop {
            match self.tokens.peek()?.0 {
                Token::Ident(_) => {
                    // BUGFIX: propagate the error instead of `.unwrap()`.
                    // The peek above matches *any* identifier token, including
                    // keywords, which `ask_ident` rejects — so unwrapping here
                    // panicked on input like `Structure S { Alias: T }` instead
                    // of producing a parse error.
                    let Spanned(ident, _) = self.ask_ident()?;
                    self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
                    let Spanned(value, _) = self.ask_type()?;
                    fields.insert(ident, value);
                    // After a field: `,` continues; `}` is left for the next
                    // loop iteration / the final `next()` below.
                    match self.tokens.peek()?.0 {
                        Token::Comma => {
                            self.eat();
                        }
                        Token::RightCurly => {}
                        _ => return Err(self.expected("a comma or closing curly braces")),
                    };
                }
                Token::RightCurly => break,
                _ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
            }
        }
        // Consume the closing `}` and extend the span over it.
        if let Spanned(Token::RightCurly, end) = self.tokens.next()? {
            return Ok(Spanned(
                ItemStructure {
                    name,
                    fields,
                    arguments,
                },
                span + end,
            ));
        };
        Err(self.expected("closing curly braces"))
    }
}

View File

@ -0,0 +1,66 @@
use crate::{
ast::{Type, TypeArguments, INFER_TYPE},
lexer::{Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses a type reference: a bare number literal (e.g. a const-generic
    /// argument), the infer placeholder, or `Name` optionally followed by
    /// angle-bracketed arguments `<Arg, ...>`.
    pub fn ask_type(&mut self) -> Result<Spanned<Type>, ParserError> {
        // A number literal is accepted as a type; its text becomes the name.
        if let Spanned(Token::NumberLiteral(_), _) = self.tokens.peek()? {
            let Spanned(number, span) = self._ask_number_literal()?;
            return Ok(Spanned(
                Type {
                    name: number.to_string(),
                    arguments: TypeArguments::None,
                },
                span,
            ));
        };
        let Spanned(name, span) = self.ask_ident()?;
        // The special infer name short-circuits to an inferred type.
        if name == INFER_TYPE {
            return Ok(Spanned(Type::infer(), span));
        }
        let mut arguments = TypeArguments::None;
        if let Spanned(crate::lexer::Token::LeftArrow, _) = self.tokens.peek()? {
            self.eat(); // eat `<`
            let mut args = vec![];
            // At least one argument is required after `<`; it is parsed
            // before the loop so `<>` is rejected.
            args.push(Box::new(self.ask_type()?.0));
            match self.tokens.peek()?.0 {
                Token::Comma => self.eat(),
                Token::RightArrow => {}
                _ => return Err(self.expected("a comma or closing angle brackets")),
            };
            // Remaining comma-separated arguments until `>`.
            loop {
                match self.tokens.peek()? {
                    Spanned(Token::Ident(_) | Token::NumberLiteral(_), _) => {
                        args.push(Box::new(self.ask_type()?.0));
                        match self.tokens.peek()?.0 {
                            Token::Comma => self.eat(),
                            Token::RightArrow => {}
                            _ => return Err(self.expected("a comma or closing angle brackets")),
                        }
                    }
                    Spanned(Token::RightArrow, _) => {
                        self.eat();
                        break;
                    }
                    _ => return Err(self.expected("closing angle brackets or a type name")),
                }
            }
            arguments = TypeArguments::AngleBracketed(args);
        };
        // Span runs from the name to the lexer's current position.
        Ok(Spanned(Type { name, arguments }, span + self.tokens.span()))
    }
}

8
programs/aidl/status.md Normal file
View File

@ -0,0 +1,8 @@
# aidl status
## parser
- [x] `Use module.thing`
- [x] `Alias Thing = OtherThing`
## codegen
TODO!

View File

@ -1 +0,0 @@
nightly

3
rust-toolchain.toml Normal file
View File

@ -0,0 +1,3 @@
[toolchain]
channel = "nightly"
components = ["cargo", "clippy", "rustfmt", "rust-analyzer"]