diff --git a/src/lexer.rs b/src/lexer.rs
index 87ecd25..251c079 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -80,7 +80,7 @@ mod tests {
     use Token::*;
 
     fn assert_lex(src: &str, expected: &[Token]) {
-        assert_eq!(Token::lexer(src).collect::<Vec<_>>(), expected)
+        assert_eq!(Token::lexer(src).collect::<Vec<_>>(), expected);
     }
     #[test]
     fn gibberish() {
diff --git a/src/parser.rs b/src/parser.rs
index 7cadac4..04b9016 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,10 +1,12 @@
 use crate::{lexer::Token, list::List, value::Value};
 use chumsky::{prelude::*, Stream};
-use logos::{Lexer, Logos};
+use logos::Logos;
 
 /// Parse source string into a value
 pub fn read(src: &str) -> Result<Vec<Value>, Vec<Simple<Token<'_>>>> {
-    parser().parse(stream_of_lexer(Token::lexer(src)))
+    let lexer = Token::lexer(src);
+    let len = lexer.source().len();
+    parser().parse(Stream::from_iter(len..len + 1, lexer.spanned()))
 }
 
 fn parser<'a>() -> impl Parser<Token<'a>, Vec<Value<'a>>, Error = Simple<Token<'a>>> {
@@ -54,20 +56,12 @@ fn parser<'a>() -> impl Parser<Token<'a>, Vec<Value<'a>>, Error = Simple<Token<'a>>> {
-fn stream_of_lexer<'a>(
-    lexer: Lexer<'a, Token<'a>>,
-) -> Stream<'_, Token<'_>, logos::Span, logos::SpannedIter<'_, Token<'_>>> {
-    let len = lexer.source().len();
-    Stream::from_iter(len..len + 1, lexer.spanned())
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
 
     fn assert_parse<'a>(src: &'a str, expected: &'a [Value<'a>]) {
-        assert_eq!(read(src).unwrap(), expected)
+        assert_eq!(read(src).unwrap(), expected);
     }
 
     #[test]
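For context on the parser.rs change: the spanned() iterator from logos yields (token, byte range) pairs, and chumsky's Stream::from_iter wraps that iterator together with an end-of-input span (len..len + 1, just past the source) that chumsky uses when reporting errors at the end of input. Below is a minimal annotated sketch of the new read, assuming the crate's Token, Value, and parser items shown above; the comments are explanatory additions, not part of the patch.

use crate::{lexer::Token, value::Value};
use chumsky::{prelude::*, Stream};
use logos::Logos;

pub fn read(src: &str) -> Result<Vec<Value>, Vec<Simple<Token<'_>>>> {
    // Lex the source; spanned() pairs each token with its byte range.
    let lexer = Token::lexer(src);
    // End-of-input span: errors at EOF are attributed to this range.
    let len = lexer.source().len();
    parser().parse(Stream::from_iter(len..len + 1, lexer.spanned()))
}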