//! Reader: parses source text into `Value`s using a Logos lexer and a Chumsky parser.
use crate::{lexer::Token, list::List, value::Value};
|
|
use chumsky::{prelude::*, Stream};
|
|
use logos::{Lexer, Logos};
|
|
|
|
/// Parse source string into a value
|
|
pub fn read(src: &str) -> Result<Vec<Value>, Vec<Simple<Token<'_>>>> {
|
|
parser().parse(stream_of_lexer(Token::lexer(src)))
|
|
}
/// Build the Chumsky parser for a sequence of top-level values.
///
/// The grammar is defined recursively: inside the closure, `value`
/// is the parser for a single value, and is reused to build the
/// compound forms (lists, vectors, maps, quote) that nest values.
fn parser<'a>() -> impl Parser<Token<'a>, Vec<Value<'a>>, Error = Simple<Token<'a>>> {
    recursive(|value| {
        // Single-token values. `select!` expands to a `match`, so the
        // literal `"true"` / `"false"` / `"nil"` arms must precede the
        // catch-all `Symbol(s)` arm — these reserved words are never
        // parsed as plain symbols.
        let atom = select! {
            Token::Symbol("true") => Value::Bool(true),
            Token::Symbol("false") => Value::Bool(false),
            Token::Symbol("nil") => Value::Nil,
            Token::Symbol(s) => Value::Symbol(s.into()),
            Token::Keyword(k) => Value::Keyword(k.into()),
            Token::String(s) => Value::String(s.into()),
            Token::Number(n) => Value::Number(n),
        };

        // `( v* )` — zero or more values between parentheses,
        // converted into a boxed cons list.
        let list = value
            .clone()
            .repeated()
            .map(List::from_vec)
            .map(Box::new)
            .map(Value::List)
            .delimited_by(just(Token::LeftParen), just(Token::RightParen));

        // `[ v* ]` — zero or more values between square brackets.
        let vector = value
            .clone()
            .repeated()
            .map(Value::Vector)
            .delimited_by(just(Token::LeftBracket), just(Token::RightBracket));

        // `{ (k v)* }` — key/value pairs between curly braces.
        // NOTE(review): entries are parsed strictly as pairs, so an
        // odd number of forms inside `{}` is a parse error here.
        let map = value
            .clone()
            .then(value.clone())
            .repeated()
            .collect()
            .map(Value::Map)
            .delimited_by(just(Token::LeftCurly), just(Token::RightCurly));

        // `'v` — reader shorthand that expands to the two-element
        // list `(quote v)`.
        let quote = just(Token::Quote).ignore_then(value).map(|value| {
            Value::List(Box::new(List::Cons(
                Value::Symbol("quote".into()),
                Box::new(List::Cons(value, Box::new(List::Nil))),
            )))
        });

        // Alternatives are tried in this order.
        atom.or(list).or(vector).or(map).or(quote)
    })
    // A program is any number of values followed by end of input.
    .repeated()
    .then_ignore(end())
}
/// Convert Logos' Lexer into Chumsky'a Stream
|
|
fn stream_of_lexer<'a>(
|
|
lexer: Lexer<'a, Token<'a>>,
|
|
) -> Stream<'_, Token<'_>, logos::Span, logos::SpannedIter<'_, Token<'_>>> {
|
|
let len = lexer.source().len();
|
|
Stream::from_iter(len..len + 1, lexer.spanned())
|
|
}