how does Able envision codegen

commit 564b42fdde
parent 9f36b7ef7f
@@ -7,5 +7,9 @@ edition = "2021"
 
 [dependencies]
 codespan-reporting = "0.11.1"
+derive_more = "0.99.17"
 logos = "0"
+proc-macro2 = "1.0.56"
+quote = "1.0.26"
+syn = "2.0.15"
 thiserror = "1"
programs/aidl/assets/error_example.idl (new file, +19)
@@ -0,0 +1,19 @@
+Type UUID = Array<U8, 16>;
+
+Type Nanoseconds = U32;
+
+Structure Duration {
+    secs: U64
+    nanos: Nanoseconds
+}
+
+Structure LinkedList<T> {
+    data: T,
+    child: Option<LinkedList<T>>,
+}
+
+Constant VERSION = Version {
+    major: 1,
+    minor: 0,
+    patch: 0
+};
@@ -1,13 +0,0 @@
-Type UUID = Array[U8; 16];
-
-Type Nanoseconds = U32;
-
-Structure Duration{
-    secs: U64,
-    nanos: Nanoseconds,
-}
-
-Structure LinkedList{
-    data: Any,
-    child: Option<LinkedList>,
-}
@@ -1,11 +1,11 @@
 // core provides lots of useful types like String and Byte
 Use core;
 
-Constant VERSION = Version {
+Constant VERSION = Make Version {
     major: 1,
     minor: 0,
     patch: 0,
-}
+};
 
 Alias Path = String;
 
@@ -15,10 +15,10 @@ Structure File {
 }
 
 Interface File {
-    function new accepts(Path) returns(None);
+    Function new Takes(Path) Returns(None);
 
     // Open in this iteration assumes the file exists
-    function open accepts(Path) returns(File);
+    Function open Takes(Path) Returns(File);
 
-    function close accepts(File) returns(None);
+    Function close Takes(File) Returns(None);
 }
@@ -22,7 +22,7 @@ pub enum Item {
     Constant(ItemConstant),
     Function(Function),
     Structure(ItemStructure),
-    Enumeration(ItemEnumeration)
+    Enumeration(ItemEnumeration),
 }
 
 #[derive(Debug, Default)]
@@ -42,7 +42,7 @@ impl Type {
     pub fn infer() -> Self {
         Self {
             name: String::from(INFER_TYPE),
-            arguments: TypeArguments::None
+            arguments: TypeArguments::None,
         }
     }
 }
@@ -82,6 +82,7 @@ pub struct ItemInterface {
 pub struct ItemStructure {
     pub name: String,
     pub fields: HashMap<String, Type>,
+    pub arguments: TypeArguments,
 }
 
 #[derive(Debug)]
@@ -100,13 +101,13 @@ pub struct ItemConstant {
 pub struct ItemEnumeration {
     pub name: String,
     pub arguments: TypeArguments,
-    pub variants: Vec<EnumerationVariant>
+    pub variants: Vec<EnumerationVariant>,
 }
 
 #[derive(Debug)]
 pub struct EnumerationVariant {
     pub name: String,
-    pub content: EnumerationContent
+    pub content: EnumerationContent,
 }
 
 #[derive(Debug, Default)]
@@ -115,7 +116,7 @@ pub enum EnumerationContent {
     None,
     Tuple(Vec<Type>),
     Structure(HashMap<String, Type>),
-    Value(NumberLiteral)
+    Value(NumberLiteral),
 }
 
 #[derive(Debug)]
@@ -143,22 +144,32 @@ pub enum Literal {
     Char(char),
 }
 
-#[derive(Debug)]
+#[derive(Debug, derive_more::Display)]
 pub enum NumberLiteral {
+    #[display(fmt = "{_0}ptr")]
     Ptr(usize),
 
+    #[display(fmt = "{_0}u8")]
     U8(u8),
+    #[display(fmt = "{_0}i8")]
     I8(i8),
 
+    #[display(fmt = "{_0}u16")]
     U16(u16),
+    #[display(fmt = "{_0}i16")]
     I16(i16),
 
+    #[display(fmt = "{_0}u32")]
     U32(u32),
+    #[display(fmt = "{_0}i32")]
     I32(i32),
 
+    #[display(fmt = "{_0}u64")]
     U64(u64),
+    #[display(fmt = "{_0}i64")]
     I64(i64),
 
+    #[display(fmt = "{_0}")]
     Infer(i64),
 }
 
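A note on the derive above: with `derive_more::Display` (0.99, as pinned in Cargo.toml), each `#[display(fmt = ...)]` renders the variant's first tuple field (bound as `_0`) followed by its suffix, so a `NumberLiteral` prints exactly like the source-level literal it was lexed from. A minimal standalone sketch of the same pattern, not part of the commit:

```rust
use derive_more::Display;

#[derive(Debug, Display)]
enum NumberLiteral {
    // `_0` names the first tuple field inside the generated format string.
    #[display(fmt = "{_0}u8")]
    U8(u8),
    #[display(fmt = "{_0}i64")]
    I64(i64),
}

fn main() {
    assert_eq!(NumberLiteral::U8(7).to_string(), "7u8");
    assert_eq!(NumberLiteral::I64(-3).to_string(), "-3i64");
}
```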
programs/aidl/src/codegen.rs (new file, +10)
@@ -0,0 +1,10 @@
+use std::path::Path;
+
+use crate::ast::IDLModule;
+
+use proc_macro2::{TokenStream, Ident, Span};
+use quote::quote;
+
+pub fn generate(module: IDLModule) -> TokenStream {
+    quote! {}
+}
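The new `codegen` module is a stub for now: `generate` takes the parsed `IDLModule` and returns an empty `TokenStream`. As a hedged sketch of the direction the new proc-macro2/quote dependencies enable (not this commit's code — `ItemAlias` and its fields are assumptions about the AST shape):

```rust
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;

// Hypothetical AST node; the real shape in ast.rs may differ.
struct ItemAlias {
    name: String,
    target: String,
}

// Lower `Alias Path = String;` to `pub type Path = String;`.
fn lower_alias(alias: &ItemAlias) -> TokenStream {
    let name = Ident::new(&alias.name, Span::call_site());
    let target = Ident::new(&alias.target, Span::call_site());
    quote! {
        pub type #name = #target;
    }
}
```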
@@ -5,119 +5,175 @@ use std::{
 
 use logos::Logos;
 
-#[derive(Logos, Debug, PartialEq)]
+#[derive(Logos, Debug, PartialEq, derive_more::Display, Clone)]
 #[logos(skip r"[ \t\n\f]+")]
 pub enum Token {
     #[token("{")]
+    #[display(fmt = "{{")]
     LeftCurly,
 
     #[token("}")]
+    #[display(fmt = "}}")]
     RightCurly,
 
     #[token("(")]
+    #[display(fmt = "(")]
     LeftParen,
 
     #[token(")")]
+    #[display(fmt = ")")]
     RightParen,
 
     #[token(";")]
+    #[display(fmt = ";")]
     Semicolon,
 
     #[token(":")]
+    #[display(fmt = ":")]
     Colon,
 
     #[token("<")]
+    #[display(fmt = "<")]
     LeftArrow,
 
     #[token(">")]
+    #[display(fmt = ">")]
     RightArrow,
 
     #[token(",")]
+    #[display(fmt = ",")]
     Comma,
 
     #[token("=")]
+    #[display(fmt = "=")]
     Equals,
 
     #[token(".")]
+    #[display(fmt = ".")]
     Dot,
 
     // why
     #[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| lex.slice().strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
+    #[display(fmt = "\"{_0}\"")]
     StringLiteral(String),
 
     #[regex(r"'.'", |lex| lex.slice().strip_prefix('\'')?.strip_suffix('\'')?.parse().ok())]
+    #[display(fmt = "{_0}")]
     CharLiteral(char),
 
     #[regex(r#"(-)?\d+"#, |lex| lex.slice().parse().ok())]
+    #[display(fmt = "{_0}")]
     NumberLiteral(i64),
 
     #[regex(
-        r"(ptr|u8|i8|u16|i16|u32|i32|u64|i64|f32|f64)",
+        "(ptr|u8|i8|u16|i16|u32|i32|u64|i64|f32|f64)",
         |lex| NumberSuffix::lexer(lex.slice()).next().and_then(Result::ok)
     )]
+    #[display(fmt = "{_0}")]
     NumberSuffix(NumberSuffix),
 
     #[regex(r#"[a-zA-Z_][a-zA-Z\d_]*"#, |lex| Ident::lexer(lex.slice()).next().and_then(Result::ok))]
+    #[display(fmt = "{_0}")]
     Ident(Ident),
 
     #[regex(r"//.*", |lex| lex.slice().parse().ok())]
+    #[display(fmt = "//{_0}")]
     Comment(String),
 }
 
-#[derive(Logos, Debug, PartialEq, Eq)]
+#[derive(Logos, Debug, Clone, PartialEq, Eq, derive_more::Display)]
 pub enum Ident {
     #[token("Interface")]
+    #[display(fmt = "Interface")]
     Interface,
     #[token("Function")]
+    #[display(fmt = "Function")]
     Function,
     #[token("Constant")]
+    #[display(fmt = "Constant")]
     Constant,
     #[token("Structure")]
+    #[display(fmt = "Structure")]
     Structure,
     #[token("Alias")]
+    #[display(fmt = "Alias")]
     Alias,
     #[token("Enumeration")]
+    #[display(fmt = "Enumeration")]
     Enumeration,
     #[token("Use")]
+    #[display(fmt = "Use")]
     Use,
     #[token("Make")]
+    #[display(fmt = "Make")]
     Make,
     #[token("Takes")]
+    #[display(fmt = "Takes")]
     Takes,
     #[token("Returns")]
+    #[display(fmt = "Returns")]
     Returns,
     #[token("_")]
+    #[display(fmt = "_")]
     Underscore,
     #[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().parse().ok())]
+    #[display(fmt = "{_0}")]
     Other(String),
 }
 
-#[derive(Logos, Debug, PartialEq, Eq)]
+#[derive(Logos, Debug, Clone, Copy, PartialEq, Eq, derive_more::Display)]
 pub enum NumberSuffix {
-    #[token("Ptr")]
+    #[token("ptr")]
+    #[display(fmt = "ptr")]
     Ptr,
     #[token("u8")]
+    #[display(fmt = "u8")]
     U8,
     #[token("i8")]
+    #[display(fmt = "i8")]
     I8,
     #[token("u16")]
+    #[display(fmt = "u16")]
     U16,
     #[token("i16")]
+    #[display(fmt = "i16")]
     I16,
     #[token("u32")]
+    #[display(fmt = "u32")]
     U32,
     #[token("i32")]
+    #[display(fmt = "i32")]
     I32,
     #[token("u64")]
+    #[display(fmt = "u64")]
     U64,
     #[token("i64")]
+    #[display(fmt = "i64")]
     I64,
     #[token("f32")]
+    #[display(fmt = "f32")]
     F32,
     #[token("f64")]
+    #[display(fmt = "f64")]
     F64,
 }
 
+impl NumberSuffix {
+    pub const ALL_SUFFIXES: [&str; 11] = [
+        "ptr",
+        "u8",
+        "i8",
+        "u16",
+        "i16",
+        "u32",
+        "i32",
+        "u64",
+        "i64",
+        "f32",
+        "f64"
+    ];
+}
+
 #[derive(Debug, Clone)]
 pub struct Span(pub Range<usize>);
 impl Span {
@@ -1,12 +1,20 @@
 #![feature(result_option_inspect)]
 #![allow(non_snake_case)]
 
-use std::path::Path;
+use std::{fmt::Display, path::Path, process::exit};
 
 use codespan_reporting::{
-    diagnostic::Label,
-    term::{termcolor::StandardStream, Config},
+    diagnostic::{Diagnostic, Label, Severity},
+    files::SimpleFile,
+    term::{
+        emit,
+        termcolor::{ColorSpec, StandardStream, StandardStreamLock},
+        Config, Styles,
+    },
 };
+use lexer::{NumberSuffix, Token};
+use logos::Logos;
+use parser::TokenIterator;
 
 use crate::lexer::Spanned;
 
@@ -16,20 +24,95 @@ mod parser;
 
 //const TEST: &str = include_str!("../assets/why.idl");
 
+fn precheck<N: Display + Clone, S: AsRef<str>>(
+    writer: &mut StandardStreamLock<'_>,
+    config: &Config,
+    file: &SimpleFile<N, S>,
+) {
+    let mut lexer = TokenIterator::new(file.source().as_ref());
+    let mut diagnostics = vec![];
+    let mut previous = lexer.peek().ok().map(|Spanned(a, b)| Spanned(a.clone(), b));
+
+    while let Ok(Spanned(token, span)) = lexer.next() {
+        let prev = Spanned(token.clone(), span.clone());
+        match token {
+            Token::Ident(lexer::Ident::Other(t)) if t == "Type" => {
+                diagnostics.push(
+                    Diagnostic::error()
+                        .with_labels(vec![Label::primary((), span.0)])
+                        .with_message("`Type` is not supported anymore.")
+                        .with_notes(vec!["use `Alias` instead of `Type`".into()]),
+                );
+            }
+            Token::Ident(lexer::Ident::Other(ident))
+                if lexer
+                    .peek()
+                    .is_ok_and(|Spanned(a, _)| matches!(a, Token::LeftCurly))
+                    && previous.is_some_and(|Spanned(ref a, _)| matches!(a, Token::Equals)) =>
+            {
+                diagnostics.push(
+                    Diagnostic::error()
+                        .with_message("Unknown expression")
+                        .with_labels(vec![Label::primary((), span.0.clone())])
+                        .with_notes(vec![
+                            format!("add `Make` before the structure name to create a Make expression that will construct the `{ident}` structure"),
+                        ]),
+                );
+            }
+            Token::Ident(lexer::Ident::Other(ident))
+                if NumberSuffix::ALL_SUFFIXES.contains(&ident.to_lowercase().as_str()) =>
+            {
+                diagnostics.push(
+                    Diagnostic::warning()
+                        .with_message("Potentially invalid use of an uppercased number type")
+                        .with_labels(vec![Label::primary((), span.0)])
+                        .with_notes(vec![
+                            format!("Replace {ident} with {}", ident.to_lowercase()),
+                            "Code generation might fail".into(),
+                        ]),
+                );
+            }
+            _ => {}
+        }
+        previous = Some(prev);
+    }
+
+    if !diagnostics.is_empty() {
+        let mut was_fatal = false;
+        for diagnostic in diagnostics {
+            if let Severity::Error | Severity::Bug = &diagnostic.severity {
+                was_fatal = true;
+            }
+
+            emit(writer, config, file, &diagnostic).unwrap();
+        }
+
+        if was_fatal {
+            exit(1);
+        }
+    }
+}
+
 fn main() {
     let mut args = std::env::args();
     args.next().unwrap();
     if let Some(file) = args.next() {
         let path = Path::new(&file);
-        dbg!(path);
         let codespan_file = codespan_reporting::files::SimpleFile::new(
             &file,
             std::fs::read_to_string(path).unwrap(),
         );
         let writer = StandardStream::stdout(codespan_reporting::term::termcolor::ColorChoice::Auto);
-        let config = Config::default();
+        let config = Config {
+            tab_width: 2,
+            ..Default::default()
+        };
+
+        precheck(&mut writer.lock(), &config, &codespan_file);
+
         match parser::parse(codespan_file.source()) {
-            Ok(ast) => println!("SUCCESS: \n{:#?}", ast),
+            Ok(ast) => println!("{:#?}", ast),
             Err(e) => {
                 let msg = e.to_string();
                 let label = match e {
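The new `precheck` pass runs before the parser: it re-lexes the source, pushes a `Diagnostic` for each known pitfall (`Type` instead of `Alias`, a missing `Make` before a structure initializer, uppercased number suffixes), renders them with `codespan_reporting::term::emit`, and exits with status 1 if any were errors. A reduced, self-contained sketch of that reporting flow (codespan-reporting 0.11; the file name and byte span are made up for illustration):

```rust
use codespan_reporting::{
    diagnostic::{Diagnostic, Label},
    files::SimpleFile,
    term::{self, termcolor::{ColorChoice, StandardStream}, Config},
};

fn main() {
    // With `SimpleFile`, the file id type is `()`.
    let file = SimpleFile::new("error_example.idl", "Type UUID = Array<U8, 16>;");

    // Point the primary label at the `Type` keyword (bytes 0..4).
    let diagnostic = Diagnostic::<()>::error()
        .with_message("`Type` is not supported anymore.")
        .with_labels(vec![Label::primary((), 0..4)])
        .with_notes(vec!["use `Alias` instead of `Type`".into()]);

    let writer = StandardStream::stdout(ColorChoice::Auto);
    let config = Config { tab_width: 2, ..Default::default() };
    term::emit(&mut writer.lock(), &config, &file, &diagnostic).unwrap();
}
```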
@@ -12,7 +12,7 @@ use crate::{
 };
 use std::iter::Iterator;
 
-struct TokenIterator<'a> {
+pub struct TokenIterator<'a> {
     lexer: Lexer<'a, Token>,
     peeked: Option<Option<Token>>,
 }
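`TokenIterator` becomes `pub` here so the new `precheck` in main.rs can drive the lexer on its own. Its `peeked: Option<Option<Token>>` field is the usual single-token lookahead buffer; a simplified standalone sketch of that pattern over a logos lexer (the real type also tracks spans and returns `Result`s):

```rust
use logos::{Lexer, Logos};

#[derive(Logos, Debug, Clone, PartialEq)]
#[logos(skip r"[ \t\n\f]+")]
enum Token {
    #[token("=")]
    Equals,
    #[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().to_owned())]
    Ident(String),
}

struct TokenIterator<'a> {
    lexer: Lexer<'a, Token>,
    // `None` = nothing buffered; `Some(None)` = peeked and hit end of input.
    peeked: Option<Option<Token>>,
}

impl<'a> TokenIterator<'a> {
    fn new(source: &'a str) -> Self {
        Self { lexer: Token::lexer(source), peeked: None }
    }

    fn peek(&mut self) -> Option<&Token> {
        if self.peeked.is_none() {
            self.peeked = Some(self.lexer.next().and_then(Result::ok));
        }
        self.peeked.as_ref().unwrap().as_ref()
    }

    fn next_token(&mut self) -> Option<Token> {
        match self.peeked.take() {
            Some(token) => token,
            None => self.lexer.next().and_then(Result::ok),
        }
    }
}

fn main() {
    let mut tokens = TokenIterator::new("Path = String");
    // Peeking does not consume the token.
    assert_eq!(tokens.peek(), Some(&Token::Ident("Path".into())));
    while let Some(token) = tokens.next_token() {
        println!("{token:?}");
    }
}
```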
@@ -102,11 +102,17 @@ impl<'a> Parser<'a> {
     fn ask_ident(&mut self) -> Result<Spanned<String>, ParserError> {
         Ok(
             match self.get_real(
-                |token| matches!(token, Token::Ident(Ident::Other(_) | Ident::Underscore)),
+                |token| {
+                    matches!(
+                        token,
+                        Token::Ident(Ident::Other(_) | Ident::Underscore) | Token::NumberSuffix(_)
+                    )
+                },
                 "an identifier",
             )? {
                 Spanned(Token::Ident(Ident::Other(ident)), span) => Spanned(ident, span),
                 Spanned(Token::Ident(Ident::Underscore), span) => Spanned("_".to_owned(), span),
+                Spanned(Token::NumberSuffix(suffix), span) => Spanned(suffix.to_string(), span),
                 _ => unreachable!(),
             },
         )
@@ -1,6 +1,9 @@
 use std::collections::HashMap;
 
-use crate::{lexer::{Ident, Spanned, Token}, ast::{Type, ItemStructure}};
+use crate::{
+    ast::{ItemStructure, Type},
+    lexer::{Ident, Spanned, Token},
+};
 
 use super::{Parser, ParserError};
 
@@ -11,7 +14,7 @@ impl<'a> Parser<'a> {
             "the `Structure` keyword",
         )?;
 
-        let Spanned(name, _) = self.ask_ident()?;
+        let Spanned(Type { name, arguments }, _) = self.ask_type()?;
         let Spanned(_, _) = self.get_real(
             |token| matches!(token, Token::LeftCurly),
             "an opening curly brace (`{`)",
@@ -36,7 +39,14 @@ impl<'a> Parser<'a> {
         }
 
         if let Spanned(Token::RightCurly, end) = self.tokens.next()? {
-            return Ok(Spanned(ItemStructure { name, fields }, span + end));
+            return Ok(Spanned(
+                ItemStructure {
+                    name,
+                    fields,
+                    arguments,
+                },
+                span + end,
+            ));
         };
 
         Err(self.expected("closing curly braces"))
@@ -7,6 +7,17 @@ use super::{Parser, ParserError};
 
 impl<'a> Parser<'a> {
     pub fn ask_type(&mut self) -> Result<Spanned<Type>, ParserError> {
+        if let Spanned(Token::NumberLiteral(_), _) = self.tokens.peek()? {
+            let Spanned(number, span) = self._ask_number_literal()?;
+            return Ok(Spanned(
+                Type {
+                    name: number.to_string(),
+                    arguments: TypeArguments::None,
+                },
+                span,
+            ));
+        };
+
         let Spanned(name, span) = self.ask_ident()?;
 
         if name == INFER_TYPE {
@@ -28,7 +39,9 @@ impl<'a> Parser<'a> {
 
         loop {
             match self.tokens.peek()? {
-                Spanned(Token::Ident(_), _) => args.push(Box::new(self.ask_type()?.0)),
+                Spanned(Token::Ident(_) | Token::NumberLiteral(_), _) => {
+                    args.push(Box::new(self.ask_type()?.0))
+                }
                 Spanned(Token::RightArrow, _) => {
                     self.eat();
                     break;