Mirror of https://github.com/azur1s/bobbylisp.git (synced 2024-10-16 02:37:40 -05:00)

Compare commits: a42415a90a ... a8aa4569c3

No commits in common. "a42415a90a55447da90e8b533e0a7df773f75972" and "a8aa4569c366d6f123addd55289a7c83e54be655" have entirely different histories.
Cargo.lock (generated), 3 lines changed:

@@ -152,8 +152,6 @@ dependencies = [
 name = "hycron"
 version = "0.1.0"
 dependencies = [
- "ariadne",
- "chumsky",
 "clap",
 "lexer",
 "parser",

@@ -207,6 +205,7 @@ dependencies = [
 name = "parser"
 version = "0.1.0"
 dependencies = [
+ "ariadne",
 "chumsky",
 "lexer",
 ]

@@ -129,25 +129,4 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = Simple<char>> {
 pub fn lex(src: String) -> (Option<Vec<(Token, std::ops::Range<usize>)>>, Vec<Simple<char>>) {
     let (tokens, lex_error) = lexer().parse_recovery(src.as_str());
     return (tokens, lex_error);
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn lex_let_simple() {
-        let (tokens, err) = lex("let x: Int = 1;".to_string());
-
-        assert_eq!(tokens, Some(vec![
-            (Token::KwLet, 0..3),
-            (Token::Identifier("x".to_string()), 4..5),
-            (Token::Colon, 5..6),
-            (Token::Identifier("Int".to_string()), 7..10),
-            (Token::Assign, 11..12),
-            (Token::Int(1), 13..14),
-            (Token::SemiColon, 14..15),
-        ]));
-        assert_eq!(err, vec![]);
-    }
 }

@@ -8,6 +8,4 @@ edition = "2021"
 [dependencies]
 clap = { version = "3.0.14", features = ["derive"] }
 lexer = { path = "../lexer" }
 parser = { path = "../parser" }
-chumsky = "0.8.0"
-ariadne = "0.1.5"

@@ -1,7 +1,6 @@
 use std::fs;
 
 use clap::Parser as ArgParser;
-use ariadne::{Report, ReportKind, Label, Source, Color, Fmt};
 use lexer::lex;
 use parser::parse;
 

@@ -23,84 +22,22 @@ fn main() {
 
         // Lex the file.
         let (tokens, lex_error) = lex(src.clone());
-        let (ast, parse_error) = parse(tokens.unwrap(), src.chars().count());
-        lex_error.into_iter()
-            .map(|e| e.map(|e| e.to_string()))
-            .chain(parse_error.into_iter().map(|e| e.map(|tok| tok.to_string())))
-            .for_each(|e| {
-                let report = Report::build(ReportKind::Error, (), e.span().start);
-
-                let report = match e.reason() {
-                    chumsky::error::SimpleReason::Unclosed { span, delimiter } => report
-                        .with_message(format!(
-                            "Unclosed delimiter {}",
-                            delimiter.fg(Color::Yellow)
-                        ))
-                        .with_label(
-                            Label::new(span.clone())
-                                .with_message(format!(
-                                    "Expected closing delimiter {}",
-                                    delimiter.fg(Color::Yellow)
-                                ))
-                                .with_color(Color::Yellow)
-                        )
-                        .with_label(
-                            Label::new(e.span())
-                                .with_message(format!(
-                                    "Must be closed before this {}",
-                                    e.found()
-                                        .unwrap_or(&"end of file".to_string())
-                                        .fg(Color::Red)
-                                ))
-                                .with_color(Color::Red)
-                        ),
-
-                    chumsky::error::SimpleReason::Unexpected => report
-                        .with_message(format!(
-                            "{}, expected {}",
-
-                            if e.found().is_some() {"Unexpected token in input" }
-                            else { "Unexpected end of input" },
-
-                            if e.expected().len() == 0 { "something else".to_string().fg(Color::Green) }
-                            else {
-                                e.expected()
-                                    .map(|expected| match expected {
-                                        Some(expected) => expected.to_string(),
-                                        None => "end of input".to_string()
-                                    })
-                                    .collect::<Vec<_>>()
-                                    .join(", ")
-                                    .fg(Color::Green)
-                            }
-                        ))
-                        .with_label(
-                            Label::new(e.span())
-                                .with_message(format!(
-                                    "Unexpected token {}",
-                                    e.found()
-                                        .unwrap_or(&"EOF".to_string())
-                                        .fg(Color::Red)
-                                ))
-                                .with_color(Color::Red)
-                        ),
-                    _ => {
-                        println!("{:?}", e);
-                        todo!();
-                    }
-                };
-
-                report.finish().print(Source::from(&src)).unwrap();
-            });
-
-        match ast {
-            Some(ast) => {
+        if lex_error.is_empty() {
+            log(0, "Lexing successful.");
+
+            let (ast, parse_error) = parse(tokens.unwrap(), src.chars().count());
+
+            if parse_error.is_empty() {
                 println!("{:#?}", ast);
-            },
-            None => {
-                log(2, "Failed to parse.");
+                log(0, "Parsing successful.");
+            } else {
+                println!("{:#?}", parse_error);
+                log(2, "Parsing failed.");
             }
+        } else {
+            println!("{:#?}", lex_error);
+            log(2, "Lexing failed.");
        }
     }
 }
 

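Note: collected in one place, the a8aa4569c3 side of the hunk above reduces to the staged flow below. This is only a sketch: the wrapper name compile_src, the file read that produces src, and the crate::util::log import path are assumptions, while lex, parse, and log themselves are the items shown elsewhere in this compare.

use std::fs;

use lexer::lex;
use parser::parse;

// Assumed import path for the log helper shown in the next hunk.
use crate::util::log;

// Hypothetical wrapper around the flow above: lex first, parse only when
// lexing reported no errors, and log the outcome of each stage.
fn compile_src(path: &str) {
    let src = fs::read_to_string(path).expect("failed to read source file");

    let (tokens, lex_error) = lex(src.clone());

    if lex_error.is_empty() {
        log(0, "Lexing successful.");

        // unwrap() relies on lex returning Some tokens whenever it reports no errors.
        let (ast, parse_error) = parse(tokens.unwrap(), src.chars().count());

        if parse_error.is_empty() {
            println!("{:#?}", ast);
            log(0, "Parsing successful.");
        } else {
            println!("{:#?}", parse_error);
            log(2, "Parsing failed.");
        }
    } else {
        println!("{:#?}", lex_error);
        log(2, "Lexing failed.");
    }
}
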
@@ -3,8 +3,8 @@ use std::fmt::Display;
 pub fn log<T: Display>(level: i8, msg: T) {
     match level {
         0 => println!("\x1b[92m[INFO]\x1b[0m {}", msg),
-        1 => println!("\x1b[93m[WARN]\x1b[0m {}", msg),
-        2 => println!("\x1b[91m[ERRS]\x1b[0m {}", msg),
+        1 => println!("[WARN] {}", msg),
+        2 => println!("[ERRO] {}", msg),
         _ => println!("{}", msg),
     }
 }

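Note: a self-contained sketch of the logger as it stands on the a8aa4569c3 side of this hunk, with a hypothetical main showing the level convention (the messages are made up for illustration).

use std::fmt::Display;

// On the a8aa4569c3 side only level 0 keeps its ANSI color;
// warnings and errors print plain [WARN] / [ERRO] prefixes.
pub fn log<T: Display>(level: i8, msg: T) {
    match level {
        0 => println!("\x1b[92m[INFO]\x1b[0m {}", msg),
        1 => println!("[WARN] {}", msg),
        2 => println!("[ERRO] {}", msg),
        _ => println!("{}", msg),
    }
}

fn main() {
    log(0, "Lexing successful.");          // [INFO] in green
    log(1, "unused identifier");           // [WARN]
    log(2, format!("{} parse errors", 2)); // [ERRO]; any Display value works
}
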
@@ -7,4 +7,5 @@ edition = "2021"
 
 [dependencies]
 lexer = { path = "../lexer" }
 chumsky = "0.8.0"
+ariadne = "0.1.5"

@@ -228,24 +228,4 @@ pub fn parse(tokens: Vec<(Token, std::ops::Range<usize>)>, len: usize) -> (Optio
     ));
 
     return (ast, parse_error)
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn parse_simple() {
-        let (_, err) = parse(vec![
-            (Token::KwLet, 0..3),
-            (Token::Identifier("x".to_string()), 4..5),
-            (Token::Colon, 5..6),
-            (Token::Identifier("Int".to_string()), 7..10),
-            (Token::Assign, 11..12),
-            (Token::Int(1), 13..14),
-            (Token::SemiColon, 14..15),
-        ], 15);
-
-        assert_eq!(err, vec![]);
-    }
 }
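
Note: the two #[cfg(test)] modules removed above are the only unit tests touched by this compare. On the a42415a90a side they can still be run with cargo test from inside the lexer and parser crate directories (assumed here to be the ../lexer and ../parser paths referenced by the Cargo.toml hunks).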