ondra05 2022-07-23 18:45:57 +02:00
parent 0d36a8ee4d
commit ec71e9a87e
2 changed files with 6 additions and 12 deletions


@@ -80,7 +80,7 @@ mod tests {
     use Token::*;
     fn assert_lex(src: &str, expected: &[Token]) {
-        assert_eq!(Token::lexer(src).collect::<Vec<_>>(), expected)
+        assert_eq!(Token::lexer(src).collect::<Vec<_>>(), expected);
     }

     #[test]
     fn gibberish() {


@@ -1,10 +1,12 @@
 use crate::{lexer::Token, list::List, value::Value};
 use chumsky::{prelude::*, Stream};
-use logos::{Lexer, Logos};
+use logos::Logos;

 /// Parse source string into a value
 pub fn read(src: &str) -> Result<Vec<Value>, Vec<Simple<Token<'_>>>> {
-    parser().parse(stream_of_lexer(Token::lexer(src)))
+    let lexer = Token::lexer(src);
+    let len = lexer.source().len();
+    parser().parse(Stream::from_iter(len..len + 1, lexer.spanned()))
 }

 fn parser<'a>() -> impl Parser<Token<'a>, Vec<Value<'a>>, Error = Simple<Token<'a>>> {
@@ -54,20 +56,12 @@ fn parser<'a>() -> impl Parser<Token<'a>, Vec<Value<'a>>, Error = Simple<Token<'
         .then_ignore(end())
 }

-/// Convert Logos' Lexer into Chumsky'a Stream
-fn stream_of_lexer<'a>(
-    lexer: Lexer<'a, Token<'a>>,
-) -> Stream<'_, Token<'_>, logos::Span, logos::SpannedIter<'_, Token<'_>>> {
-    let len = lexer.source().len();
-    Stream::from_iter(len..len + 1, lexer.spanned())
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;

     fn assert_parse<'a>(src: &'a str, expected: &'a [Value<'a>]) {
-        assert_eq!(read(src).unwrap(), expected)
+        assert_eq!(read(src).unwrap(), expected);
     }

     #[test]
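
The substantive change in this commit is inlining the old stream_of_lexer helper into read: gluing a Logos lexer onto a Chumsky parser is a one-liner once Stream::from_iter is handed the lexer's spanned token iterator plus an end-of-input span (len..len + 1, one byte past the source, so errors can point at "end of input"). Inlining also drops the helper's unwieldy return type, Stream<'_, Token<'_>, logos::Span, logos::SpannedIter<'_, Token<'_>>>. Below is a minimal self-contained sketch of the same bridge, assuming logos 0.12 and chumsky 0.8 era APIs and a hypothetical token type Tok with a toy parser; the crate's real Token and parser() are not shown in full here.

use chumsky::{prelude::*, Stream};
use logos::Logos;

#[derive(Logos, Clone, Debug, PartialEq, Eq, Hash)]
enum Tok {
    // Lex runs of digits into u64 tokens.
    #[regex(r"[0-9]+", |lex| lex.slice().parse())]
    Num(u64),

    // logos 0.12 requires an explicit error variant; whitespace is skipped.
    #[error]
    #[regex(r"[ \t\r\n]+", logos::skip)]
    Error,
}

fn main() {
    let src = "1 2 3";
    let lexer = Tok::lexer(src);

    // The end-of-input span sits one past the last source byte,
    // mirroring the `len..len + 1` range in the commit above.
    let len = lexer.source().len();
    let stream = Stream::from_iter(len..len + 1, lexer.spanned());

    // A toy token-level parser: any run of numbers, then end of input.
    let numbers = filter_map(|span, tok| match tok {
        Tok::Num(n) => Ok(n),
        other => Err(Simple::custom(span, format!("unexpected {:?}", other))),
    })
    .repeated()
    .then_ignore(end());

    assert_eq!(numbers.parse(stream).unwrap(), vec![1, 2, 3]);
}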